// shared/extensions/distr.rs
use anyhow::Context;
use ignore::gitignore::GitignoreBuilder;
use serde::{Deserialize, Serialize};
use std::{
    collections::BTreeMap,
    io::{Read, Write},
    path::Path,
    sync::Arc,
};
use utoipa::ToSchema;
use zip::write::FileOptions;

/// Parsed contents of an extension's `Metadata.toml` manifest.
#[derive(Clone, ToSchema, Deserialize, Serialize)]
pub struct MetadataToml {
    // Reverse-domain package name, e.g. `com.example.myextension`.
    pub package_name: String,
    // Human-readable extension name.
    pub name: String,
    // Panel versions this extension declares compatibility with.
    #[schema(value_type = String)]
    pub panel_version: semver::VersionReq,
}
20
21impl MetadataToml {
22    /// Get the package identifier for this extension.
23    /// This is derived from the package name by replacing `.` with `_`.
24    ///
25    /// Example: `com.example.myextension` becomes `com_example_myextension`.
26    #[inline]
27    pub fn get_package_identifier(&self) -> String {
28        Self::convert_package_name_to_identifier(&self.package_name)
29    }
30
31    /// Convert a package name to an identifier by replacing `.` with `_`.
32    ///
33    /// Example: `com.example.myextension` becomes `com_example_myextension`.
34    #[inline]
35    pub fn convert_package_name_to_identifier(package_name: &str) -> String {
36        package_name.replace('.', "_")
37    }
38
39    /// Validate that a package identifier matches the same constraints as the package name
40    /// validation in `ExtensionDistrFile::validate`, but with `_` instead of `.` as the separator.
41    #[inline]
42    pub fn is_valid_package_identifier(identifier: &str) -> bool {
43        let mut segments = identifier.split('_');
44        let tld = segments.next();
45        let author = segments.next();
46        let ident = segments.next();
47
48        if segments.next().is_some() {
49            return false;
50        }
51
52        let Some(tld) = tld else { return false };
53        let Some(author) = author else { return false };
54        let Some(ident) = ident else { return false };
55
56        if !(2..=6).contains(&tld.len()) {
57            return false;
58        }
59        if !(3..=30).contains(&author.len()) {
60            return false;
61        }
62        if !(4..=30).contains(&ident.len()) {
63            return false;
64        }
65
66        if !tld.chars().all(|c| c.is_ascii_lowercase()) {
67            return false;
68        }
69        if !author
70            .chars()
71            .all(|c| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '-')
72        {
73            return false;
74        }
75        if !ident
76            .chars()
77            .all(|c| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '-')
78        {
79            return false;
80        }
81
82        true
83    }
84
85    /// Convert an identifier to a package name by replacing `_` with `.`.
86    ///
87    /// Example: `com_example_myextension` becomes `com.example.myextension`.
88    #[inline]
89    pub fn convert_identifier_to_package_name(identifier: &str) -> String {
90        identifier.replace('_', ".")
91    }
92}
93
/// The `[package]` table of an extension's `backend/Cargo.toml`.
#[derive(Clone, Deserialize, Serialize)]
pub struct CargoPackage {
    // Required here, even though Cargo itself treats these as optional;
    // the looser variant lives inside `resync_extension_list`.
    pub description: String,
    pub authors: Vec<String>,
    pub version: semver::Version,
}
100
/// Parsed `backend/Cargo.toml` of an extension.
#[derive(Clone, Deserialize, Serialize)]
pub struct CargoToml {
    pub package: CargoPackage,
    // Dependency values are kept as raw TOML (either string or table form).
    pub dependencies: BTreeMap<String, toml::Value>,
}
106
/// Parsed `frontend/package.json` of an extension.
#[derive(Clone, Deserialize, Serialize)]
pub struct PackageJson {
    // Package name -> version requirement, both as plain strings.
    pub dependencies: BTreeMap<String, String>,
}
111
/// A single SQL migration shipped with an extension.
#[derive(Clone)]
pub struct ExtensionMigration {
    // Deterministic UUID derived from extension identifier, name and date
    // (see `from_directory_raw`).
    pub id: uuid::Uuid,
    // Migration directory name, e.g. `20260125115245_add_users`.
    pub name: String,
    // Timestamp parsed from the leading `yyyymmddhhmmss` token of `name`.
    pub date: chrono::DateTime<chrono::Utc>,
    // Contents of the migration's `up.sql`.
    pub sql: String,
    // Contents of the migration's `down.sql`.
    pub sql_down: String,
}
120
121impl ExtensionMigration {
122    pub fn from_directory_raw(
123        path: &Path,
124        extension_identifier: &str,
125        mut content_up_raw: impl std::io::Read,
126        mut content_down_raw: impl std::io::Read,
127    ) -> Result<Self, std::io::Error> {
128        let mut content_up = String::new();
129        content_up_raw.read_to_string(&mut content_up)?;
130
131        let mut content_down = String::new();
132        content_down_raw.read_to_string(&mut content_down)?;
133
134        let name = path
135            .file_name()
136            .ok_or_else(|| {
137                std::io::Error::new(
138                    std::io::ErrorKind::InvalidData,
139                    format!(
140                        "invalid migration directory name `{}`: unable to extract directory name.",
141                        path.display()
142                    ),
143                )
144            })?
145            .to_string_lossy()
146            .to_string();
147
148        if !name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') {
149            return Err(std::io::Error::new(
150                std::io::ErrorKind::InvalidData,
151                format!(
152                    "invalid migration directory name `{}`: must be ASCII and contain only alphanumeric characters or underscores.",
153                    path.display()
154                ),
155            ));
156        }
157
158        let date = name
159            .split('_')
160            .next()
161            .and_then(|date_str| {
162                chrono::NaiveDateTime::parse_from_str(date_str, "%Y%m%d%H%M%S")
163                    .ok()
164                    .map(|ndt| ndt.and_utc())
165            })
166            .ok_or_else(|| {
167                std::io::Error::new(
168                    std::io::ErrorKind::InvalidData,
169                    format!(
170                        "invalid migration directory name `{}`: unable to parse date from directory name. expected format `yyyymmddhhmmss_description/`.",
171                        path.display()
172                    ),
173                )
174            })?;
175
176        let xxh3 = xxhash_rust::xxh3::xxh3_128(
177            format!(
178                "{}:{}:{}",
179                extension_identifier,
180                name,
181                date.timestamp_millis()
182            )
183            .as_bytes(),
184        );
185        let id = uuid::Builder::from_u128(xxh3)
186            .with_variant(uuid::Variant::RFC4122)
187            .with_version(uuid::Version::Custom)
188            .into_uuid();
189
190        Ok(Self {
191            id,
192            name,
193            date,
194            sql: content_up,
195            sql_down: content_down,
196        })
197    }
198}
199
/// An opened `.c7s.zip` extension distribution archive together with its
/// parsed manifests.
#[derive(Clone)]
pub struct ExtensionDistrFile {
    // Archive handle; `Arc<File>` lets the archive (and clones) share the fd.
    zip: zip::ZipArchive<Arc<std::fs::File>>,

    pub metadata_toml: MetadataToml,
    pub cargo_toml: CargoToml,
    pub package_json: PackageJson,
}
208
209impl ExtensionDistrFile {
210    pub fn parse_from_reader(file: std::fs::File) -> Result<Self, anyhow::Error> {
211        let mut zip = zip::ZipArchive::new(Arc::new(file))?;
212
213        let mut metadata_toml = zip.by_name("Metadata.toml")?;
214        let mut metadata_toml_bytes = vec![0; metadata_toml.size() as usize];
215        metadata_toml.read_exact(&mut metadata_toml_bytes)?;
216        drop(metadata_toml);
217        let metadata_toml: MetadataToml = toml::from_slice(&metadata_toml_bytes)?;
218
219        let mut cargo_toml = zip.by_name("backend/Cargo.toml")?;
220        let mut cargo_toml_bytes = vec![0; cargo_toml.size() as usize];
221        cargo_toml.read_exact(&mut cargo_toml_bytes)?;
222        drop(cargo_toml);
223
224        let cargo_toml: CargoToml = toml::from_slice(&cargo_toml_bytes)?;
225
226        let mut package_json = zip.by_name("frontend/package.json")?;
227        let mut package_json_bytes = vec![0; package_json.size() as usize];
228        package_json.read_exact(&mut package_json_bytes)?;
229        drop(package_json);
230
231        let package_json: PackageJson = serde_json::from_slice(&package_json_bytes)?;
232
233        let mut this = Self {
234            zip,
235            metadata_toml,
236            cargo_toml,
237            package_json,
238        };
239        this.validate()?;
240
241        Ok(this)
242    }
243
244    pub fn extract_backend(&mut self, path: impl AsRef<Path>) -> Result<(), anyhow::Error> {
245        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
246
247        let mut i = 0;
248        while let Ok(mut entry) = self.zip.by_index(i) {
249            i += 1;
250
251            if !entry.name().starts_with("backend/") {
252                continue;
253            }
254
255            let clean_path = match entry.enclosed_name() {
256                Some(clean_path) => clean_path,
257                None => continue,
258            };
259            let clean_path = match clean_path.strip_prefix("backend/") {
260                Ok(clean_path) => clean_path,
261                Err(_) => continue,
262            };
263
264            if entry.is_dir() {
265                filesystem.create_dir_all(clean_path)?;
266            } else if entry.is_file() {
267                let mut file = filesystem.create(clean_path)?;
268
269                std::io::copy(&mut entry, &mut file)?;
270                file.flush()?;
271                file.sync_all()?;
272            }
273        }
274
275        filesystem.write(
276            "Metadata.toml",
277            toml::to_string_pretty(&self.metadata_toml)?.into_bytes(),
278        )?;
279
280        Ok(())
281    }
282
283    pub fn extract_frontend(&mut self, path: impl AsRef<Path>) -> Result<(), anyhow::Error> {
284        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
285
286        let mut i = 0;
287        while let Ok(mut entry) = self.zip.by_index(i) {
288            i += 1;
289
290            if !entry.name().starts_with("frontend/") {
291                continue;
292            }
293
294            let clean_path = match entry.enclosed_name() {
295                Some(clean_path) => clean_path,
296                None => continue,
297            };
298            let clean_path = match clean_path.strip_prefix("frontend/") {
299                Ok(clean_path) => clean_path,
300                Err(_) => continue,
301            };
302
303            if entry.is_dir() {
304                filesystem.create_dir_all(clean_path)?;
305            } else if entry.is_file() {
306                let mut file = filesystem.create(clean_path)?;
307
308                std::io::copy(&mut entry, &mut file)?;
309                file.flush()?;
310                file.sync_all()?;
311            }
312        }
313
314        Ok(())
315    }
316
317    pub fn has_migrations(&mut self) -> bool {
318        self.zip.by_name("migrations/").is_ok()
319    }
320
321    pub fn extract_migrations(&mut self, path: impl AsRef<Path>) -> Result<(), anyhow::Error> {
322        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
323
324        let mut i = 0;
325        while let Ok(mut entry) = self.zip.by_index(i) {
326            i += 1;
327
328            if !entry.name().starts_with("migrations/") {
329                continue;
330            }
331
332            let clean_path = match entry.enclosed_name() {
333                Some(clean_path) => clean_path,
334                None => continue,
335            };
336            let clean_path = match clean_path.strip_prefix("migrations/") {
337                Ok(clean_path) => clean_path,
338                Err(_) => continue,
339            };
340
341            if entry.is_dir() {
342                filesystem.create_dir_all(clean_path)?;
343            } else if entry.is_file() {
344                let mut file = filesystem.create(clean_path)?;
345
346                std::io::copy(&mut entry, &mut file)?;
347                file.flush()?;
348                file.sync_all()?;
349            }
350        }
351
352        Ok(())
353    }
354
355    pub fn get_migrations(&mut self) -> Result<Vec<ExtensionMigration>, anyhow::Error> {
356        let mut migrations = Vec::new();
357
358        let mut migration_dirs = Vec::new();
359        let mut i = 0;
360        while let Ok(entry) = self.zip.by_index(i) {
361            i += 1;
362
363            let entry_name = entry.name().to_string();
364            if entry_name.starts_with("migrations/")
365                && entry_name.ends_with("/up.sql")
366                && !entry.is_dir()
367                && let Some(dir_name) = entry_name
368                    .strip_prefix("migrations/")
369                    .and_then(|s| s.strip_suffix("/up.sql"))
370            {
371                migration_dirs.push(dir_name.to_string());
372            }
373        }
374
375        for dir_name in migration_dirs {
376            let up_path = Path::new("migrations").join(&dir_name).join("up.sql");
377            let down_path = Path::new("migrations").join(&dir_name).join("down.sql");
378
379            let mut up_entry = self.zip.by_path(&up_path)?;
380            let mut up_bytes = vec![0; up_entry.size() as usize];
381            up_entry.read_exact(&mut up_bytes)?;
382            drop(up_entry);
383
384            let mut down_entry = self.zip.by_path(&down_path)?;
385            let mut down_bytes = vec![0; down_entry.size() as usize];
386            down_entry.read_exact(&mut down_bytes)?;
387            drop(down_entry);
388
389            let dir_path = Path::new(&dir_name);
390
391            migrations.push(ExtensionMigration::from_directory_raw(
392                dir_path,
393                &self.metadata_toml.get_package_identifier(),
394                &up_bytes[..],
395                &down_bytes[..],
396            )?);
397        }
398
399        Ok(migrations)
400    }
401
402    pub fn validate(&mut self) -> Result<(), anyhow::Error> {
403        const MUST_EXIST_DIRECTORIES: &[&str] =
404            &["backend/", "backend/src/", "frontend/", "frontend/src/"];
405        const MUST_EXIST_FILES: &[&str] = &[
406            "Metadata.toml",
407            "backend/Cargo.toml",
408            "backend/src/lib.rs",
409            "frontend/package.json",
410        ];
411
412        let mut package_segments = self.metadata_toml.package_name.split('.');
413        let tld_segment = package_segments.next().ok_or_else(|| {
414            anyhow::anyhow!("invalid package name in calagopus extension archive. (too few segments, expected 3)")
415        })?;
416        let author_segment = package_segments.next().ok_or_else(|| {
417            anyhow::anyhow!("invalid package name in calagopus extension archive. (too few segments, expected 3)")
418        })?;
419        let identifier_segment = package_segments.next().ok_or_else(|| {
420            anyhow::anyhow!("invalid package name in calagopus extension archive. (too few segments, expected 3)")
421        })?;
422
423        if package_segments.next().is_some() {
424            return Err(anyhow::anyhow!(
425                "invalid package name in calagopus extension archive. (too many segments, expected 3)"
426            ));
427        }
428
429        if tld_segment.len() < 2 || tld_segment.len() > 6 {
430            return Err(anyhow::anyhow!(
431                "invalid tld segment `{}` in calagopus extension archive package name.",
432                tld_segment
433            ));
434        }
435
436        if author_segment.len() < 3 || author_segment.len() > 30 {
437            return Err(anyhow::anyhow!(
438                "invalid author segment `{}` in calagopus extension archive package name.",
439                author_segment
440            ));
441        }
442
443        if identifier_segment.len() < 4 || identifier_segment.len() > 30 {
444            return Err(anyhow::anyhow!(
445                "invalid identifier segment `{}` in calagopus extension archive package name.",
446                identifier_segment
447            ));
448        }
449
450        for c in tld_segment.chars() {
451            if !c.is_ascii_lowercase() {
452                return Err(anyhow::anyhow!(
453                    "invalid character `{c}` in tld segment of calagopus extension archive package name."
454                ));
455            }
456        }
457
458        for c in author_segment.chars() {
459            if !c.is_ascii_lowercase() && !c.is_ascii_digit() && c != '-' {
460                return Err(anyhow::anyhow!(
461                    "invalid character `{c}` in author segment of calagopus extension archive package name."
462                ));
463            }
464        }
465
466        for c in identifier_segment.chars() {
467            if !c.is_ascii_lowercase() && !c.is_ascii_digit() && c != '-' {
468                return Err(anyhow::anyhow!(
469                    "invalid character `{c}` in identifier segment of calagopus extension archive package name."
470                ));
471            }
472        }
473
474        for dir in MUST_EXIST_DIRECTORIES {
475            if self.zip.by_name(dir).ok().is_none_or(|e| !e.is_dir()) {
476                return Err(anyhow::anyhow!(
477                    "unable to find directory `{dir}` in calagopus extension archive."
478                ));
479            }
480        }
481
482        for file in MUST_EXIST_FILES {
483            if self.zip.by_name(file).ok().is_none_or(|e| !e.is_file()) {
484                return Err(anyhow::anyhow!(
485                    "unable to find file `{file}` in calagopus extension archive."
486                ));
487            }
488        }
489
490        if self.zip.by_name("frontend/src/index.ts").is_err()
491            && self.zip.by_name("frontend/src/index.tsx").is_err()
492        {
493            return Err(anyhow::anyhow!(
494                "unable to find file `frontend/src/index.ts` or `frontend/src/index.tsx` in calagopus extension archive."
495            ));
496        }
497
498        {
499            let mut lib = self.zip.by_name("backend/src/lib.rs")?;
500            let mut lib_string = String::new();
501            lib_string.reserve_exact(lib.size() as usize);
502            lib.read_to_string(&mut lib_string)?;
503            drop(lib);
504
505            if !lib_string.contains("pub struct ExtensionStruct") {
506                return Err(anyhow::anyhow!(
507                    "unable to find `pub struct ExtensionStruct` in calagopus extension archive backend/src/lib.rs."
508                ));
509            }
510        }
511
512        {
513            let mut index = if let Ok(index) = self.zip.by_name("frontend/src/index.ts") {
514                index
515            } else {
516                self.zip.by_name("frontend/src/index.tsx")?
517            };
518            let mut index_string = String::new();
519            index_string.reserve_exact(index.size() as usize);
520            index.read_to_string(&mut index_string)?;
521            drop(index);
522
523            if !index_string.contains("export default ") {
524                return Err(anyhow::anyhow!(
525                    "unable to find `export default ` in calagopus extension archive frontend/src/index.ts."
526                ));
527            }
528        }
529
530        if self.has_migrations()
531            && let Err(err) = self.get_migrations()
532        {
533            return Err(anyhow::anyhow!(
534                "unable to parse migrations in calagopus extension archive. make sure they are formatted as directories `20260125115245_xxx_xxx/` containing `up.sql` and `down.sql`. {err}"
535            ));
536        }
537
538        Ok(())
539    }
540
541    #[inline]
542    pub fn total_size(&self) -> u128 {
543        self.zip.decompressed_size().unwrap_or_default()
544    }
545}
546
/// Extension manifests loaded from an already-installed (extracted)
/// extension rather than from a `.c7s.zip` archive.
pub struct SlimExtensionDistrFile {
    pub metadata_toml: MetadataToml,
    pub cargo_toml: CargoToml,
    pub package_json: PackageJson,
}
552
553impl SlimExtensionDistrFile {
554    pub fn parse_from_directory(path: impl AsRef<Path>) -> Result<Vec<Self>, anyhow::Error> {
555        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
556        let mut results = Vec::new();
557
558        let mut dir = filesystem.read_dir("backend-extensions")?;
559        while let Some(Ok((is_dir, name))) = dir.next_entry() {
560            if !is_dir || name == "internal-list" {
561                continue;
562            }
563
564            let metadata_toml = filesystem.read_to_string(
565                Path::new("backend-extensions")
566                    .join(&name)
567                    .join("Metadata.toml"),
568            )?;
569            let metadata_toml: MetadataToml = toml::from_str(&metadata_toml)?;
570
571            let cargo_toml = filesystem.read_to_string(
572                Path::new("backend-extensions")
573                    .join(&name)
574                    .join("Cargo.toml"),
575            )?;
576            let cargo_toml: CargoToml = toml::from_str(&cargo_toml)?;
577
578            let package_json = filesystem.read_to_string(
579                Path::new("frontend/extensions")
580                    .join(&name)
581                    .join("package.json"),
582            )?;
583            let package_json: PackageJson = serde_json::from_str(&package_json)?;
584
585            results.push(Self {
586                metadata_toml,
587                cargo_toml,
588                package_json,
589            });
590        }
591
592        Ok(results)
593    }
594}
595
/// Incrementally builds a `.c7s.zip` extension archive.
pub struct ExtensionDistrFileBuilder {
    zip: zip::ZipWriter<std::fs::File>,
    // Track which sections have been written; `write` requires backend and
    // frontend, and each section may only be added once.
    wrote_backend: bool,
    wrote_frontend: bool,
    wrote_migrations: bool,
}
602
603impl ExtensionDistrFileBuilder {
604    pub fn new(file: std::fs::File) -> Self {
605        Self {
606            zip: zip::ZipWriter::new(file),
607            wrote_backend: false,
608            wrote_frontend: false,
609            wrote_migrations: false,
610        }
611    }
612
613    pub fn add_backend(mut self, path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
614        if self.wrote_backend {
615            return Err(anyhow::anyhow!(
616                "Cannot write backend, it has already been written."
617            ));
618        }
619
620        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
621
622        let metadata_toml = filesystem
623            .read_to_string("Metadata.toml")
624            .context("Failed to read Metadata.toml from backend extension directory.")?;
625        self.zip
626            .start_file("Metadata.toml", FileOptions::<()>::default())?;
627        self.zip.write_all(metadata_toml.as_bytes())?;
628
629        self.zip.add_directory(
630            "backend",
631            FileOptions::<()>::default().compression_level(Some(9)),
632        )?;
633
634        let ignored = &[GitignoreBuilder::new("/")
635            .add_line(None, "Metadata.toml")?
636            .build()?];
637
638        let mut walker = filesystem.walk_dir(path)?.with_ignored(ignored);
639        while let Some(Ok((_, name))) = walker.next_entry() {
640            let metadata = filesystem.metadata(&name)?;
641            let virtual_path = Path::new("backend").join(&name);
642
643            let options: FileOptions<()> = FileOptions::default().compression_level(Some(9));
644
645            if metadata.is_dir() {
646                self.zip
647                    .add_directory(virtual_path.to_string_lossy(), options)?;
648            } else if metadata.is_file() {
649                self.zip
650                    .start_file(virtual_path.to_string_lossy(), options)?;
651
652                let mut reader = filesystem.open(&name)?;
653                std::io::copy(&mut reader, &mut self.zip)?;
654            }
655        }
656
657        self.wrote_backend = true;
658
659        Ok(self)
660    }
661
662    pub fn add_frontend(mut self, path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
663        if self.wrote_frontend {
664            return Err(anyhow::anyhow!(
665                "Cannot write frontend, it has already been written."
666            ));
667        }
668
669        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
670
671        self.zip.add_directory(
672            "frontend",
673            FileOptions::<()>::default().compression_level(Some(9)),
674        )?;
675
676        let ignored = &[GitignoreBuilder::new("/")
677            .add_line(None, "node_modules/")?
678            .build()?];
679
680        let mut walker = filesystem.walk_dir(path)?.with_ignored(ignored);
681        while let Some(Ok((_, name))) = walker.next_entry() {
682            let metadata = filesystem.metadata(&name)?;
683            let virtual_path = Path::new("frontend").join(&name);
684
685            let options: FileOptions<()> = FileOptions::default().compression_level(Some(9));
686
687            if metadata.is_dir() {
688                self.zip
689                    .add_directory(virtual_path.to_string_lossy(), options)?;
690            } else if metadata.is_file() {
691                self.zip
692                    .start_file(virtual_path.to_string_lossy(), options)?;
693
694                let mut reader = filesystem.open(&name)?;
695                std::io::copy(&mut reader, &mut self.zip)?;
696            }
697        }
698
699        self.wrote_frontend = true;
700
701        Ok(self)
702    }
703
704    pub fn add_migrations(mut self, path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
705        if self.wrote_migrations {
706            return Err(anyhow::anyhow!(
707                "Cannot write migrations, they have already been written."
708            ));
709        }
710
711        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
712
713        self.zip.add_directory(
714            "migrations",
715            FileOptions::<()>::default().compression_level(Some(9)),
716        )?;
717
718        let mut walker = filesystem.walk_dir(path)?;
719        while let Some(Ok((_, name))) = walker.next_entry() {
720            let metadata = filesystem.metadata(&name)?;
721            let virtual_path = Path::new("migrations").join(&name);
722
723            let options: FileOptions<()> = FileOptions::default().compression_level(Some(9));
724
725            if metadata.is_dir() {
726                self.zip
727                    .add_directory(virtual_path.to_string_lossy(), options)?;
728            } else if metadata.is_file() {
729                self.zip
730                    .start_file(virtual_path.to_string_lossy(), options)?;
731
732                let mut reader = filesystem.open(&name)?;
733                std::io::copy(&mut reader, &mut self.zip)?;
734            }
735        }
736
737        self.wrote_migrations = true;
738
739        Ok(self)
740    }
741
742    pub fn write(mut self) -> std::io::Result<std::fs::File> {
743        if !self.wrote_backend {
744            return Err(std::io::Error::new(
745                std::io::ErrorKind::InvalidData,
746                "Cannot finish writing extension archive: backend files not written.",
747            ));
748        }
749
750        if !self.wrote_frontend {
751            return Err(std::io::Error::new(
752                std::io::ErrorKind::InvalidData,
753                "Cannot finish writing extension archive: frontend files not written.",
754            ));
755        }
756
757        self.zip.set_comment(format!(
758            "this .c7s.zip extension archive has been generated by calagopus@{}",
759            crate::VERSION
760        ))?;
761        let writer = self.zip.finish()?;
762
763        Ok(writer)
764    }
765}
766
767pub fn resync_extension_list() -> Result<(), anyhow::Error> {
768    let internal_list_extension = Path::new("backend-extensions/internal-list");
769    let extensions_path = Path::new("backend-extensions");
770
771    let mut packages = Vec::new();
772
773    for dir in std::fs::read_dir(extensions_path).unwrap().flatten() {
774        if !dir.file_type().unwrap().is_dir() || dir.file_name() == "internal-list" {
775            continue;
776        }
777
778        let metadata_toml = match std::fs::read_to_string(dir.path().join("Metadata.toml")) {
779            Ok(file) => file,
780            Err(_) => continue,
781        };
782
783        let cargo_toml = match std::fs::read_to_string(dir.path().join("Cargo.toml")) {
784            Ok(file) => file,
785            Err(_) => continue,
786        };
787
788        #[derive(Deserialize)]
789        struct MetadataToml {
790            package_name: String,
791            name: String,
792            panel_version: semver::VersionReq,
793        }
794
795        #[derive(Deserialize)]
796        struct CargoToml {
797            package: CargoPackage,
798        }
799
800        #[derive(Deserialize)]
801        struct CargoPackage {
802            description: Option<String>,
803            authors: Option<Vec<String>>,
804            version: semver::Version,
805        }
806
807        let metadata_toml: MetadataToml = toml::from_str(&metadata_toml).unwrap();
808        let cargo_toml: CargoToml = toml::from_str(&cargo_toml).unwrap();
809        packages.push((dir.file_name(), metadata_toml, cargo_toml.package));
810    }
811
812    std::fs::create_dir_all(internal_list_extension).unwrap();
813    std::fs::create_dir_all(internal_list_extension.join("src")).unwrap();
814
815    let mut deps = String::new();
816
817    for (path, metadata, _) in packages.iter() {
818        deps.push_str(&metadata.package_name.replace('.', "_"));
819        deps.push_str(" = { path = \"../");
820        deps.push_str(&path.to_string_lossy());
821        deps.push_str("\" }\n");
822    }
823
824    const CARGO_TEMPLATE_TOML: &str =
825        include_str!("../../../backend-extensions/internal-list/Cargo.template.toml");
826
827    std::fs::write(
828        internal_list_extension.join("Cargo.toml"),
829        format!("{CARGO_TEMPLATE_TOML}{}", deps),
830    )?;
831
832    let mut exts = String::new();
833
834    for (_, metadata, package) in packages {
835        exts.push_str(&format!(
836            r#"
837        ConstructedExtension {{
838            metadata_toml: MetadataToml {{
839                package_name: {}.to_string(),
840                name: {}.to_string(),
841                panel_version: semver::VersionReq::parse({}).unwrap(),
842            }},
843            package_name: {},
844            description: {},
845            authors: &{},
846            version: semver::Version::parse({}).unwrap(),
847            extension: Arc::new({}::ExtensionStruct::default()),
848        }},"#,
849            toml::Value::String(metadata.package_name.clone()),
850            toml::Value::String(metadata.name),
851            toml::Value::String(metadata.panel_version.to_string()),
852            toml::Value::String(metadata.package_name.clone()),
853            toml::Value::String(package.description.unwrap_or_default()),
854            toml::Value::Array(
855                package
856                    .authors
857                    .unwrap_or_default()
858                    .into_iter()
859                    .map(toml::Value::String)
860                    .collect(),
861            ),
862            toml::Value::String(package.version.to_string()),
863            metadata.package_name.replace('.', "_"),
864        ));
865    }
866
867    let exts_vec = if exts.is_empty() {
868        "vec![]".to_string()
869    } else {
870        format!("vec![{}\n    ]", exts)
871    };
872
873    std::fs::write(
874        internal_list_extension.join("src/lib.rs"),
875        format!(
876            r#"#![allow(clippy::default_constructed_unit_structs)]
877#![allow(unused_imports)]
878
879use shared::extensions::{{ConstructedExtension, distr::MetadataToml}};
880use std::sync::Arc;
881
882pub fn list() -> Vec<ConstructedExtension> {{
883    {}
884}}
885"#,
886            exts_vec,
887        ),
888    )?;
889
890    Ok(())
891}