// shared/extensions/distr.rs

1use anyhow::Context;
2use ignore::gitignore::GitignoreBuilder;
3use serde::{Deserialize, Serialize};
4use std::{
5    collections::BTreeMap,
6    io::{Read, Write},
7    path::Path,
8    sync::Arc,
9};
10use utoipa::ToSchema;
11use zip::write::FileOptions;
12
/// Extension metadata, parsed from the archive's root `Metadata.toml`.
#[derive(Clone, ToSchema, Deserialize, Serialize)]
pub struct MetadataToml {
    /// Reverse-domain package name, e.g. `com.example.myextension`.
    pub package_name: String,
    /// Human-readable extension name.
    pub name: String,
    /// Panel versions this extension is compatible with.
    #[schema(value_type = String)]
    pub panel_version: semver::VersionReq,
}
20
21impl MetadataToml {
22    /// Get the package identifier for this extension.
23    /// This is derived from the package name by replacing `.` with `_`.
24    ///
25    /// Example: `com.example.myextension` becomes `com_example_myextension`.
26    #[inline]
27    pub fn get_package_identifier(&self) -> String {
28        Self::convert_package_name_to_identifier(&self.package_name)
29    }
30
31    /// Convert a package name to an identifier by replacing `.` with `_`.
32    ///
33    /// Example: `com.example.myextension` becomes `com_example_myextension`.
34    #[inline]
35    pub fn convert_package_name_to_identifier(package_name: &str) -> String {
36        package_name.replace('.', "_")
37    }
38
39    /// Convert an identifier to a package name by replacing `_` with `.`.
40    ///
41    /// Example: `com_example_myextension` becomes `com.example.myextension`.
42    #[inline]
43    pub fn convert_identifier_to_package_name(identifier: &str) -> String {
44        identifier.replace('_', ".")
45    }
46}
47
/// The `[package]` table of an extension's `backend/Cargo.toml`.
#[derive(Clone, Deserialize, Serialize)]
pub struct CargoPackage {
    pub description: String,
    pub authors: Vec<String>,
    pub version: semver::Version,
}
54
/// Typed view of an extension's `backend/Cargo.toml` manifest.
#[derive(Clone, Deserialize, Serialize)]
pub struct CargoToml {
    pub package: CargoPackage,
    // BTreeMap keeps dependency ordering stable across serialization.
    pub dependencies: BTreeMap<String, toml::Value>,
}
60
/// Typed view of an extension's `frontend/package.json` (dependencies only).
#[derive(Clone, Deserialize, Serialize)]
pub struct PackageJson {
    pub dependencies: BTreeMap<String, String>,
}
65
/// A single database migration shipped by an extension.
#[derive(Clone)]
pub struct ExtensionMigration {
    // Deterministic id derived from the extension identifier, directory
    // name and timestamp (see `from_directory_raw`).
    pub id: uuid::Uuid,
    /// Migration directory name, e.g. `20260125115245_add_users`.
    pub name: String,
    /// Timestamp parsed from the leading segment of `name`, taken as UTC.
    pub date: chrono::DateTime<chrono::Utc>,
    /// Contents of the migration's `up.sql`.
    pub sql: String,
    /// Contents of the migration's `down.sql`.
    pub sql_down: String,
}
74
impl ExtensionMigration {
    /// Parse a migration from its directory path plus raw `up.sql` /
    /// `down.sql` readers.
    ///
    /// The directory's file name must be ASCII alphanumerics/underscores
    /// and start with a `%Y%m%d%H%M%S` timestamp, e.g.
    /// `20260125115245_add_users`. `extension_identifier` is the owning
    /// extension's package identifier and is mixed into the migration id.
    ///
    /// # Errors
    /// Returns `InvalidData` when the directory name is missing, contains
    /// invalid characters, or has no parseable leading timestamp; read
    /// errors from either SQL reader are propagated as-is.
    pub fn from_directory_raw(
        path: &Path,
        extension_identifier: &str,
        mut content_up_raw: impl std::io::Read,
        mut content_down_raw: impl std::io::Read,
    ) -> Result<Self, std::io::Error> {
        let mut content_up = String::new();
        content_up_raw.read_to_string(&mut content_up)?;

        let mut content_down = String::new();
        content_down_raw.read_to_string(&mut content_down)?;

        let name = path
            .file_name()
            .ok_or_else(|| {
                std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    format!(
                        "invalid migration directory name `{}`: unable to extract directory name.",
                        path.display()
                    ),
                )
            })?
            .to_string_lossy()
            .to_string();

        // Character whitelist guarantees the `_`-split below is meaningful.
        if !name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') {
            return Err(std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                format!(
                    "invalid migration directory name `{}`: must be ASCII and contain only alphanumeric characters or underscores.",
                    path.display()
                ),
            ));
        }

        // The leading `_`-separated segment is the migration timestamp,
        // interpreted as UTC.
        let date = name
            .split('_')
            .next()
            .and_then(|date_str| {
                chrono::NaiveDateTime::parse_from_str(date_str, "%Y%m%d%H%M%S")
                    .ok()
                    .map(|ndt| ndt.and_utc())
            })
            .ok_or_else(|| {
                std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    format!(
                        "invalid migration directory name `{}`: unable to parse date from directory name. expected format `yyyymmddhhmmss_description/`.",
                        path.display()
                    ),
                )
            })?;

        // Deterministic id: xxh3-128 of `identifier:name:timestamp_millis`,
        // packed as a version-8 ("custom") RFC 4122 UUID.
        // NOTE(review): this hash input format is part of migration
        // identity — changing it would change every existing migration id.
        let xxh3 = xxhash_rust::xxh3::xxh3_128(
            format!(
                "{}:{}:{}",
                extension_identifier,
                name,
                date.timestamp_millis()
            )
            .as_bytes(),
        );
        let id = uuid::Builder::from_u128(xxh3)
            .with_variant(uuid::Variant::RFC4122)
            .with_version(uuid::Version::Custom)
            .into_uuid();

        Ok(Self {
            id,
            name,
            date,
            sql: content_up,
            sql_down: content_down,
        })
    }
}
153
/// A parsed `.c7s.zip` extension distribution archive.
#[derive(Clone)]
pub struct ExtensionDistrFile {
    // Open archive handle, kept for on-demand extraction. Cloning shares
    // the underlying file via `Arc`.
    zip: zip::ZipArchive<Arc<std::fs::File>>,

    /// Parsed `Metadata.toml` from the archive root.
    pub metadata_toml: MetadataToml,
    /// Parsed `backend/Cargo.toml`.
    pub cargo_toml: CargoToml,
    /// Parsed `frontend/package.json`.
    pub package_json: PackageJson,
}
162
163impl ExtensionDistrFile {
    /// Open and parse a `.c7s.zip` extension archive, then validate it.
    ///
    /// Reads `Metadata.toml` (archive root), `backend/Cargo.toml` and
    /// `frontend/package.json` into their typed forms and runs
    /// [`Self::validate`] before returning.
    ///
    /// # Errors
    /// Fails when the file is not a readable zip archive, a required entry
    /// is missing or unparseable, or validation rejects the archive.
    pub fn parse_from_reader(file: std::fs::File) -> Result<Self, anyhow::Error> {
        let mut zip = zip::ZipArchive::new(Arc::new(file))?;

        // Each buffer is sized from the entry's uncompressed size; the
        // entry borrow is dropped before the next `by_name` lookup on `zip`.
        let mut metadata_toml = zip.by_name("Metadata.toml")?;
        let mut metadata_toml_bytes = vec![0; metadata_toml.size() as usize];
        metadata_toml.read_exact(&mut metadata_toml_bytes)?;
        drop(metadata_toml);
        let metadata_toml: MetadataToml = toml::from_slice(&metadata_toml_bytes)?;

        let mut cargo_toml = zip.by_name("backend/Cargo.toml")?;
        let mut cargo_toml_bytes = vec![0; cargo_toml.size() as usize];
        cargo_toml.read_exact(&mut cargo_toml_bytes)?;
        drop(cargo_toml);

        let cargo_toml: CargoToml = toml::from_slice(&cargo_toml_bytes)?;

        let mut package_json = zip.by_name("frontend/package.json")?;
        let mut package_json_bytes = vec![0; package_json.size() as usize];
        package_json.read_exact(&mut package_json_bytes)?;
        drop(package_json);

        let package_json: PackageJson = serde_json::from_slice(&package_json_bytes)?;

        let mut this = Self {
            zip,
            metadata_toml,
            cargo_toml,
            package_json,
        };
        this.validate()?;

        Ok(this)
    }
197
198    pub fn extract_backend(&mut self, path: impl AsRef<Path>) -> Result<(), anyhow::Error> {
199        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
200
201        let mut i = 0;
202        while let Ok(mut entry) = self.zip.by_index(i) {
203            i += 1;
204
205            if !entry.name().starts_with("backend/") {
206                continue;
207            }
208
209            let clean_path = match entry.enclosed_name() {
210                Some(clean_path) => clean_path,
211                None => continue,
212            };
213            let clean_path = match clean_path.strip_prefix("backend/") {
214                Ok(clean_path) => clean_path,
215                Err(_) => continue,
216            };
217
218            if entry.is_dir() {
219                filesystem.create_dir_all(clean_path)?;
220            } else if entry.is_file() {
221                let mut file = filesystem.create(clean_path)?;
222
223                std::io::copy(&mut entry, &mut file)?;
224                file.flush()?;
225                file.sync_all()?;
226            }
227        }
228
229        filesystem.write(
230            "Metadata.toml",
231            toml::to_string_pretty(&self.metadata_toml)?.into_bytes(),
232        )?;
233
234        Ok(())
235    }
236
237    pub fn extract_frontend(&mut self, path: impl AsRef<Path>) -> Result<(), anyhow::Error> {
238        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
239
240        let mut i = 0;
241        while let Ok(mut entry) = self.zip.by_index(i) {
242            i += 1;
243
244            if !entry.name().starts_with("frontend/") {
245                continue;
246            }
247
248            let clean_path = match entry.enclosed_name() {
249                Some(clean_path) => clean_path,
250                None => continue,
251            };
252            let clean_path = match clean_path.strip_prefix("frontend/") {
253                Ok(clean_path) => clean_path,
254                Err(_) => continue,
255            };
256
257            if entry.is_dir() {
258                filesystem.create_dir_all(clean_path)?;
259            } else if entry.is_file() {
260                let mut file = filesystem.create(clean_path)?;
261
262                std::io::copy(&mut entry, &mut file)?;
263                file.flush()?;
264                file.sync_all()?;
265            }
266        }
267
268        Ok(())
269    }
270
271    pub fn has_migrations(&mut self) -> bool {
272        self.zip.by_name("migrations/").is_ok()
273    }
274
275    pub fn extract_migrations(&mut self, path: impl AsRef<Path>) -> Result<(), anyhow::Error> {
276        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
277
278        let mut i = 0;
279        while let Ok(mut entry) = self.zip.by_index(i) {
280            i += 1;
281
282            if !entry.name().starts_with("migrations/") {
283                continue;
284            }
285
286            let clean_path = match entry.enclosed_name() {
287                Some(clean_path) => clean_path,
288                None => continue,
289            };
290            let clean_path = match clean_path.strip_prefix("migrations/") {
291                Ok(clean_path) => clean_path,
292                Err(_) => continue,
293            };
294
295            if entry.is_dir() {
296                filesystem.create_dir_all(clean_path)?;
297            } else if entry.is_file() {
298                let mut file = filesystem.create(clean_path)?;
299
300                std::io::copy(&mut entry, &mut file)?;
301                file.flush()?;
302                file.sync_all()?;
303            }
304        }
305
306        Ok(())
307    }
308
309    pub fn get_migrations(&mut self) -> Result<Vec<ExtensionMigration>, anyhow::Error> {
310        let mut migrations = Vec::new();
311
312        let mut migration_dirs = Vec::new();
313        let mut i = 0;
314        while let Ok(entry) = self.zip.by_index(i) {
315            i += 1;
316
317            let entry_name = entry.name().to_string();
318            if entry_name.starts_with("migrations/")
319                && entry_name.ends_with("/up.sql")
320                && !entry.is_dir()
321                && let Some(dir_name) = entry_name
322                    .strip_prefix("migrations/")
323                    .and_then(|s| s.strip_suffix("/up.sql"))
324            {
325                migration_dirs.push(dir_name.to_string());
326            }
327        }
328
329        for dir_name in migration_dirs {
330            let up_path = Path::new("migrations").join(&dir_name).join("up.sql");
331            let down_path = Path::new("migrations").join(&dir_name).join("down.sql");
332
333            let mut up_entry = self.zip.by_path(&up_path)?;
334            let mut up_bytes = vec![0; up_entry.size() as usize];
335            up_entry.read_exact(&mut up_bytes)?;
336            drop(up_entry);
337
338            let mut down_entry = self.zip.by_path(&down_path)?;
339            let mut down_bytes = vec![0; down_entry.size() as usize];
340            down_entry.read_exact(&mut down_bytes)?;
341            drop(down_entry);
342
343            let dir_path = Path::new(&dir_name);
344
345            migrations.push(ExtensionMigration::from_directory_raw(
346                dir_path,
347                &self.metadata_toml.get_package_identifier(),
348                &up_bytes[..],
349                &down_bytes[..],
350            )?);
351        }
352
353        Ok(migrations)
354    }
355
    /// Validate the archive layout and its package name.
    ///
    /// Enforces, in order: a three-segment package name
    /// (`tld.author.identifier`) with per-segment length and character
    /// rules, presence of the required directories and files, a
    /// `pub struct ExtensionStruct` in `backend/src/lib.rs`, a default
    /// export in `frontend/src/index.ts`, and — when migrations are
    /// shipped — that they all parse.
    ///
    /// # Errors
    /// Returns a descriptive `anyhow::Error` for the first rule violated.
    pub fn validate(&mut self) -> Result<(), anyhow::Error> {
        // NOTE(review): directory checks rely on explicit directory entries
        // existing in the zip (ExtensionDistrFileBuilder writes them, but
        // arbitrary zip tools may not) — confirm for third-party archives.
        const MUST_EXIST_DIRECTORIES: &[&str] =
            &["backend/", "backend/src/", "frontend/", "frontend/src/"];
        const MUST_EXIST_FILES: &[&str] = &[
            "Metadata.toml",
            "backend/Cargo.toml",
            "backend/src/lib.rs",
            "frontend/package.json",
            "frontend/src/index.ts",
        ];

        // Package name must be exactly `tld.author.identifier`.
        let mut package_segments = self.metadata_toml.package_name.split('.');
        let tld_segment = package_segments.next().ok_or_else(|| {
            anyhow::anyhow!("invalid package name in calagopus extension archive. (too few segments, expected 3)")
        })?;
        let author_segment = package_segments.next().ok_or_else(|| {
            anyhow::anyhow!("invalid package name in calagopus extension archive. (too few segments, expected 3)")
        })?;
        let identifier_segment = package_segments.next().ok_or_else(|| {
            anyhow::anyhow!("invalid package name in calagopus extension archive. (too few segments, expected 3)")
        })?;

        if package_segments.next().is_some() {
            return Err(anyhow::anyhow!(
                "invalid package name in calagopus extension archive. (too many segments, expected 3)"
            ));
        }

        // Per-segment length limits (in bytes, since `len()` is used).
        if tld_segment.len() < 2 || tld_segment.len() > 6 {
            return Err(anyhow::anyhow!(
                "invalid tld segment `{}` in calagopus extension archive package name.",
                tld_segment
            ));
        }

        if author_segment.len() < 3 || author_segment.len() > 30 {
            return Err(anyhow::anyhow!(
                "invalid author segment `{}` in calagopus extension archive package name.",
                author_segment
            ));
        }

        if identifier_segment.len() < 4 || identifier_segment.len() > 30 {
            return Err(anyhow::anyhow!(
                "invalid identifier segment `{}` in calagopus extension archive package name.",
                identifier_segment
            ));
        }

        // tld: lowercase ASCII letters only.
        for c in tld_segment.chars() {
            if !c.is_ascii_lowercase() {
                return Err(anyhow::anyhow!(
                    "invalid character `{c}` in tld segment of calagopus extension archive package name."
                ));
            }
        }

        // author / identifier: lowercase ASCII letters, digits, or `-`.
        for c in author_segment.chars() {
            if !c.is_ascii_lowercase() && !c.is_ascii_digit() && c != '-' {
                return Err(anyhow::anyhow!(
                    "invalid character `{c}` in author segment of calagopus extension archive package name."
                ));
            }
        }

        for c in identifier_segment.chars() {
            if !c.is_ascii_lowercase() && !c.is_ascii_digit() && c != '-' {
                return Err(anyhow::anyhow!(
                    "invalid character `{c}` in identifier segment of calagopus extension archive package name."
                ));
            }
        }

        for dir in MUST_EXIST_DIRECTORIES {
            if self.zip.by_name(dir).ok().is_none_or(|e| !e.is_dir()) {
                return Err(anyhow::anyhow!(
                    "unable to find directory `{dir}` in calagopus extension archive."
                ));
            }
        }

        for file in MUST_EXIST_FILES {
            if self.zip.by_name(file).ok().is_none_or(|e| !e.is_file()) {
                return Err(anyhow::anyhow!(
                    "unable to find file `{file}` in calagopus extension archive."
                ));
            }
        }

        // The backend crate must expose the well-known extension entry
        // struct; checked textually, not by compiling.
        {
            let mut lib = self.zip.by_name("backend/src/lib.rs")?;
            let mut lib_string = String::new();
            lib_string.reserve_exact(lib.size() as usize);
            lib.read_to_string(&mut lib_string)?;
            drop(lib);

            if !lib_string.contains("pub struct ExtensionStruct") {
                return Err(anyhow::anyhow!(
                    "unable to find `pub struct ExtensionStruct` in calagopus extension archive backend/src/lib.rs."
                ));
            }
        }

        // The frontend entry module must have a default export; also a
        // textual check.
        {
            let mut index = self.zip.by_name("frontend/src/index.ts")?;
            let mut index_string = String::new();
            index_string.reserve_exact(index.size() as usize);
            index.read_to_string(&mut index_string)?;
            drop(index);

            if !index_string.contains("export default ") {
                return Err(anyhow::anyhow!(
                    "unable to find `export default ` in calagopus extension archive frontend/src/index.ts."
                ));
            }
        }

        // Migrations are optional; when present they must all parse.
        if self.has_migrations()
            && let Err(err) = self.get_migrations()
        {
            return Err(anyhow::anyhow!(
                "unable to parse migrations in calagopus extension archive. make sure they are formatted as directories `20260125115245_xxx_xxx/` containing `up.sql` and `down.sql`. {err}"
            ));
        }

        Ok(())
    }
483
484    #[inline]
485    pub fn total_size(&self) -> u128 {
486        self.zip.decompressed_size().unwrap_or_default()
487    }
488}
489
/// Extension manifests parsed from an installed directory tree — the same
/// three manifests as [`ExtensionDistrFile`], but without an archive handle.
pub struct SlimExtensionDistrFile {
    pub metadata_toml: MetadataToml,
    pub cargo_toml: CargoToml,
    pub package_json: PackageJson,
}
495
impl SlimExtensionDistrFile {
    /// Parse the manifests of every installed extension under `path`.
    ///
    /// Scans `backend-extensions/` for extension directories (skipping the
    /// generated `internal-list`), reading each one's `Metadata.toml` and
    /// `Cargo.toml`, plus the matching `frontend/extensions/<name>/package.json`.
    /// NOTE(review): backend manifests come from `backend-extensions/` but
    /// package.json from `frontend/extensions/` — presumably the two trees
    /// are kept in sync by installation; confirm against the installer.
    ///
    /// # Errors
    /// Fails when a manifest is missing or unparseable.
    pub fn parse_from_directory(path: impl AsRef<Path>) -> Result<Vec<Self>, anyhow::Error> {
        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;
        let mut results = Vec::new();

        // NOTE(review): `while let Some(Ok(..))` skips unreadable entries
        // silently and stops iteration on the first `Err` — verify this
        // best-effort behavior is intended for partially-broken installs.
        let mut dir = filesystem.read_dir("backend-extensions")?;
        while let Some(Ok((is_dir, name))) = dir.next_entry() {
            if !is_dir || name == "internal-list" {
                continue;
            }

            let metadata_toml = filesystem.read_to_string(
                Path::new("backend-extensions")
                    .join(&name)
                    .join("Metadata.toml"),
            )?;
            let metadata_toml: MetadataToml = toml::from_str(&metadata_toml)?;

            let cargo_toml = filesystem.read_to_string(
                Path::new("backend-extensions")
                    .join(&name)
                    .join("Cargo.toml"),
            )?;
            let cargo_toml: CargoToml = toml::from_str(&cargo_toml)?;

            let package_json = filesystem.read_to_string(
                Path::new("frontend/extensions")
                    .join(&name)
                    .join("package.json"),
            )?;
            let package_json: PackageJson = serde_json::from_str(&package_json)?;

            results.push(Self {
                metadata_toml,
                cargo_toml,
                package_json,
            });
        }

        Ok(results)
    }
}
538
/// Streaming builder for a `.c7s.zip` extension archive.
pub struct ExtensionDistrFileBuilder {
    zip: zip::ZipWriter<std::fs::File>,
    // Each section may be written at most once; backend and frontend are
    // required before `write`, migrations are optional.
    wrote_backend: bool,
    wrote_frontend: bool,
    wrote_migrations: bool,
}
545
546impl ExtensionDistrFileBuilder {
547    pub fn new(file: std::fs::File) -> Self {
548        Self {
549            zip: zip::ZipWriter::new(file),
550            wrote_backend: false,
551            wrote_frontend: false,
552            wrote_migrations: false,
553        }
554    }
555
    /// Add the backend extension directory at `path` to the archive under
    /// `backend/`, and store its `Metadata.toml` at the archive root.
    ///
    /// The backend tree's own `Metadata.toml` is excluded via an ignore
    /// rule since it lives at the archive root instead.
    ///
    /// # Errors
    /// Fails when called twice, when `Metadata.toml` is missing, or on any
    /// read/write error.
    pub fn add_backend(mut self, path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
        if self.wrote_backend {
            return Err(anyhow::anyhow!(
                "Cannot write backend, it has already been written."
            ));
        }

        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;

        // Metadata.toml goes at the archive root with default (not max)
        // compression options.
        let metadata_toml = filesystem
            .read_to_string("Metadata.toml")
            .context("Failed to read Metadata.toml from backend extension directory.")?;
        self.zip
            .start_file("Metadata.toml", FileOptions::<()>::default())?;
        self.zip.write_all(metadata_toml.as_bytes())?;

        self.zip.add_directory(
            "backend",
            FileOptions::<()>::default().compression_level(Some(9)),
        )?;

        let ignored = &[GitignoreBuilder::new("/")
            .add_line(None, "Metadata.toml")?
            .build()?];

        // NOTE(review): `walk_dir(path)` is called with the same path the
        // capability filesystem was rooted at — verify `CapFilesystem`
        // expects the original path here rather than a relative ".".
        // Also: `while let Some(Ok(..))` silently skips/stops on walker
        // errors; confirm best-effort packing is intended.
        let mut walker = filesystem.walk_dir(path)?.with_ignored(ignored);
        while let Some(Ok((_, name))) = walker.next_entry() {
            let metadata = filesystem.metadata(&name)?;
            let virtual_path = Path::new("backend").join(&name);

            let options: FileOptions<()> = FileOptions::default().compression_level(Some(9));

            if metadata.is_dir() {
                self.zip
                    .add_directory(virtual_path.to_string_lossy(), options)?;
            } else if metadata.is_file() {
                self.zip
                    .start_file(virtual_path.to_string_lossy(), options)?;

                let mut reader = filesystem.open(&name)?;
                std::io::copy(&mut reader, &mut self.zip)?;
            }
        }

        self.wrote_backend = true;

        Ok(self)
    }
604
    /// Add the frontend extension directory at `path` to the archive under
    /// `frontend/`, skipping `node_modules/`.
    ///
    /// # Errors
    /// Fails when called twice, or on any read/write error.
    pub fn add_frontend(mut self, path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
        if self.wrote_frontend {
            return Err(anyhow::anyhow!(
                "Cannot write frontend, it has already been written."
            ));
        }

        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;

        self.zip.add_directory(
            "frontend",
            FileOptions::<()>::default().compression_level(Some(9)),
        )?;

        // Exclude installed dependencies from the archive.
        let ignored = &[GitignoreBuilder::new("/")
            .add_line(None, "node_modules/")?
            .build()?];

        // NOTE(review): as in `add_backend`, `while let Some(Ok(..))`
        // silently skips/stops on walker errors; confirm best-effort
        // packing is intended.
        let mut walker = filesystem.walk_dir(path)?.with_ignored(ignored);
        while let Some(Ok((_, name))) = walker.next_entry() {
            let metadata = filesystem.metadata(&name)?;
            let virtual_path = Path::new("frontend").join(&name);

            let options: FileOptions<()> = FileOptions::default().compression_level(Some(9));

            if metadata.is_dir() {
                self.zip
                    .add_directory(virtual_path.to_string_lossy(), options)?;
            } else if metadata.is_file() {
                self.zip
                    .start_file(virtual_path.to_string_lossy(), options)?;

                let mut reader = filesystem.open(&name)?;
                std::io::copy(&mut reader, &mut self.zip)?;
            }
        }

        self.wrote_frontend = true;

        Ok(self)
    }
646
    /// Add the migrations directory at `path` to the archive under
    /// `migrations/`. This section is optional — `write` does not require it.
    ///
    /// # Errors
    /// Fails when called twice, or on any read/write error.
    pub fn add_migrations(mut self, path: impl AsRef<Path>) -> Result<Self, anyhow::Error> {
        if self.wrote_migrations {
            return Err(anyhow::anyhow!(
                "Cannot write migrations, they have already been written."
            ));
        }

        let filesystem = crate::cap::CapFilesystem::new(path.as_ref().to_path_buf())?;

        self.zip.add_directory(
            "migrations",
            FileOptions::<()>::default().compression_level(Some(9)),
        )?;

        // NOTE(review): `while let Some(Ok(..))` silently skips/stops on
        // walker errors; confirm best-effort packing is intended.
        let mut walker = filesystem.walk_dir(path)?;
        while let Some(Ok((_, name))) = walker.next_entry() {
            let metadata = filesystem.metadata(&name)?;
            let virtual_path = Path::new("migrations").join(&name);

            let options: FileOptions<()> = FileOptions::default().compression_level(Some(9));

            if metadata.is_dir() {
                self.zip
                    .add_directory(virtual_path.to_string_lossy(), options)?;
            } else if metadata.is_file() {
                self.zip
                    .start_file(virtual_path.to_string_lossy(), options)?;

                let mut reader = filesystem.open(&name)?;
                std::io::copy(&mut reader, &mut self.zip)?;
            }
        }

        self.wrote_migrations = true;

        Ok(self)
    }
684
685    pub fn write(mut self) -> std::io::Result<std::fs::File> {
686        if !self.wrote_backend {
687            return Err(std::io::Error::new(
688                std::io::ErrorKind::InvalidData,
689                "Cannot finish writing extension archive: backend files not written.",
690            ));
691        }
692
693        if !self.wrote_frontend {
694            return Err(std::io::Error::new(
695                std::io::ErrorKind::InvalidData,
696                "Cannot finish writing extension archive: frontend files not written.",
697            ));
698        }
699
700        self.zip.set_comment(format!(
701            "this .c7s.zip extension archive has been generated by calagopus@{}",
702            crate::VERSION
703        ));
704        let writer = self.zip.finish()?;
705
706        Ok(writer)
707    }
708}
709
710pub fn resync_extension_list() -> Result<(), anyhow::Error> {
711    let internal_list_extension = Path::new("backend-extensions/internal-list");
712    let extensions_path = Path::new("backend-extensions");
713
714    let mut packages = Vec::new();
715
716    for dir in std::fs::read_dir(extensions_path).unwrap().flatten() {
717        if !dir.file_type().unwrap().is_dir() || dir.file_name() == "internal-list" {
718            continue;
719        }
720
721        let metadata_toml = match std::fs::read_to_string(dir.path().join("Metadata.toml")) {
722            Ok(file) => file,
723            Err(_) => continue,
724        };
725
726        let cargo_toml = match std::fs::read_to_string(dir.path().join("Cargo.toml")) {
727            Ok(file) => file,
728            Err(_) => continue,
729        };
730
731        #[derive(Deserialize)]
732        struct MetadataToml {
733            package_name: String,
734            name: String,
735            panel_version: semver::VersionReq,
736        }
737
738        #[derive(Deserialize)]
739        struct CargoToml {
740            package: CargoPackage,
741        }
742
743        #[derive(Deserialize)]
744        struct CargoPackage {
745            description: Option<String>,
746            authors: Option<Vec<String>>,
747            version: semver::Version,
748        }
749
750        let metadata_toml: MetadataToml = toml::from_str(&metadata_toml).unwrap();
751        let cargo_toml: CargoToml = toml::from_str(&cargo_toml).unwrap();
752        packages.push((dir.file_name(), metadata_toml, cargo_toml.package));
753    }
754
755    std::fs::create_dir_all(internal_list_extension).unwrap();
756    std::fs::create_dir_all(internal_list_extension.join("src")).unwrap();
757
758    let mut deps = String::new();
759
760    for (path, metadata, _) in packages.iter() {
761        deps.push_str(&metadata.package_name.replace('.', "_"));
762        deps.push_str(" = { path = \"../");
763        deps.push_str(&path.to_string_lossy());
764        deps.push_str("\" }\n");
765    }
766
767    const CARGO_TEMPLATE_TOML: &str =
768        include_str!("../../../backend-extensions/internal-list/Cargo.template.toml");
769
770    std::fs::write(
771        internal_list_extension.join("Cargo.toml"),
772        format!("{CARGO_TEMPLATE_TOML}{}", deps),
773    )?;
774
775    let mut exts = String::new();
776
777    for (_, metadata, package) in packages {
778        exts.push_str(&format!(
779            r#"
780        ConstructedExtension {{
781            metadata_toml: MetadataToml {{
782                package_name: {}.to_string(),
783                name: {}.to_string(),
784                panel_version: semver::VersionReq::parse({}).unwrap(),
785            }},
786            package_name: {},
787            description: {},
788            authors: &{},
789            version: semver::Version::parse({}).unwrap(),
790            extension: Arc::new({}::ExtensionStruct::default()),
791        }},"#,
792            toml::Value::String(metadata.package_name.clone()),
793            toml::Value::String(metadata.name),
794            toml::Value::String(metadata.panel_version.to_string()),
795            toml::Value::String(metadata.package_name.clone()),
796            toml::Value::String(package.description.unwrap_or_default()),
797            toml::Value::Array(
798                package
799                    .authors
800                    .unwrap_or_default()
801                    .into_iter()
802                    .map(toml::Value::String)
803                    .collect(),
804            ),
805            toml::Value::String(package.version.to_string()),
806            metadata.package_name.replace('.', "_"),
807        ));
808    }
809
810    std::fs::write(
811        internal_list_extension.join("src/lib.rs"),
812        format!(
813            r#"#![allow(clippy::default_constructed_unit_structs)]
814#![allow(unused_imports)]
815
816use shared::extensions::{{ConstructedExtension, distr::MetadataToml}};
817use std::sync::Arc;
818
819pub fn list() -> Vec<ConstructedExtension> {{
820    vec![{}
821    ]
822}}
823"#,
824            exts,
825        ),
826    )?;
827
828    Ok(())
829}