diff --git a/CHANGELOG.md b/CHANGELOG.md index 83d9c535..1bf7614f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ ### Features - The `bson-uuid-impl` feature now supports `bson::oid::ObjectId` as well ([#340](https://github.com/Aleph-Alpha/ts-rs/pull/340)) +- Allow multile types to have the same `#[ts(export_to = "...")]` attribute and be exported to the same file ([#316](https://github.com/Aleph-Alpha/ts-rs/pull/316)) ### Fixes diff --git a/Cargo.lock b/Cargo.lock index 3cb30379..ace8028b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1151,6 +1151,7 @@ dependencies = [ "dprint-plugin-typescript", "heapless", "indexmap", + "lazy_static", "ordered-float", "semver", "serde", diff --git a/macros/src/attr/field.rs b/macros/src/attr/field.rs index dde475d4..c7726efc 100644 --- a/macros/src/attr/field.rs +++ b/macros/src/attr/field.rs @@ -1,275 +1,275 @@ -use syn::{ - AngleBracketedGenericArguments, Attribute, Field, GenericArgument, Ident, PathArguments, QSelf, - Result, ReturnType, Type, TypeArray, TypeGroup, TypeParen, TypePath, TypePtr, TypeReference, - TypeSlice, TypeTuple, -}; - -use super::{parse_assign_from_str, parse_assign_str, Attr, Serde}; -use crate::utils::{parse_attrs, parse_docs}; - -#[derive(Default)] -pub struct FieldAttr { - type_as: Option, - pub type_override: Option, - pub rename: Option, - pub inline: bool, - pub skip: bool, - pub optional: Optional, - pub flatten: bool, - pub docs: String, - - pub using_serde_with: bool, -} - -/// Indicates whether the field is marked with `#[ts(optional)]`. -/// `#[ts(optional)]` turns an `t: Option` into `t?: T`, while -/// `#[ts(optional = nullable)]` turns it into `t?: T | null`. -#[derive(Default)] -pub struct Optional { - pub optional: bool, - pub nullable: bool, -} - -impl FieldAttr { - pub fn from_attrs(attrs: &[Attribute]) -> Result { - let mut result = parse_attrs::(attrs)?; - - if cfg!(feature = "serde-compat") && !result.skip { - let serde_attr = crate::utils::parse_serde_attrs::(attrs); - result = result.merge(serde_attr.0); - } - - result.docs = parse_docs(attrs)?; - - Ok(result) - } - - pub fn type_as(&self, original_type: &Type) -> Type { - if let Some(mut ty) = self.type_as.clone() { - replace_underscore(&mut ty, original_type); - ty - } else { - original_type.clone() - } - } -} - -impl Attr for FieldAttr { - type Item = Field; - - fn merge(self, other: Self) -> Self { - Self { - type_as: self.type_as.or(other.type_as), - type_override: self.type_override.or(other.type_override), - rename: self.rename.or(other.rename), - inline: self.inline || other.inline, - skip: self.skip || other.skip, - optional: Optional { - optional: self.optional.optional || other.optional.optional, - nullable: self.optional.nullable || other.optional.nullable, - }, - flatten: self.flatten || other.flatten, - - using_serde_with: self.using_serde_with || other.using_serde_with, - - // We can't emit TSDoc for a flattened field - // and we cant make this invalid in assert_validity because - // this documentation is totally valid in Rust - docs: if self.flatten || other.flatten { - String::new() - } else { - self.docs + &other.docs - }, - } - } - - fn assert_validity(&self, field: &Self::Item) -> Result<()> { - if cfg!(feature = "serde-compat") - && self.using_serde_with - && !(self.type_as.is_some() || self.type_override.is_some()) - { - syn_err_spanned!( - field; - r#"using `#[serde(with = "...")]` requires the use of `#[ts(as = "...")]` or `#[ts(type = "...")]`"# - ) - } - - if self.type_override.is_some() { - if self.type_as.is_some() { 
- syn_err_spanned!(field; "`type` is not compatible with `as`") - } - - if self.inline { - syn_err_spanned!(field; "`type` is not compatible with `inline`") - } - - if self.flatten { - syn_err_spanned!( - field; - "`type` is not compatible with `flatten`" - ); - } - } - - if self.flatten { - if self.type_as.is_some() { - syn_err_spanned!( - field; - "`as` is not compatible with `flatten`" - ); - } - - if self.rename.is_some() { - syn_err_spanned!( - field; - "`rename` is not compatible with `flatten`" - ); - } - - if self.inline { - syn_err_spanned!( - field; - "`inline` is not compatible with `flatten`" - ); - } - - if self.optional.optional { - syn_err_spanned!( - field; - "`optional` is not compatible with `flatten`" - ); - } - } - - if field.ident.is_none() { - if self.flatten { - syn_err_spanned!( - field; - "`flatten` cannot with tuple struct fields" - ); - } - - if self.rename.is_some() { - syn_err_spanned!( - field; - "`flatten` cannot with tuple struct fields" - ); - } - - if self.optional.optional { - syn_err_spanned!( - field; - "`optional` cannot with tuple struct fields" - ); - } - } - - Ok(()) - } -} - -impl_parse! { - FieldAttr(input, out) { - "as" => out.type_as = Some(parse_assign_from_str(input)?), - "type" => out.type_override = Some(parse_assign_str(input)?), - "rename" => out.rename = Some(parse_assign_str(input)?), - "inline" => out.inline = true, - "skip" => out.skip = true, - "optional" => { - use syn::{Token, Error}; - let nullable = if input.peek(Token![=]) { - input.parse::()?; - let span = input.span(); - match Ident::parse(input)?.to_string().as_str() { - "nullable" => true, - _ => Err(Error::new(span, "expected 'nullable'"))? - } - } else { - false - }; - out.optional = Optional { - optional: true, - nullable, - } - }, - "flatten" => out.flatten = true, - } -} - -impl_parse! { - Serde(input, out) { - "rename" => out.0.rename = Some(parse_assign_str(input)?), - "skip" => out.0.skip = true, - "flatten" => out.0.flatten = true, - // parse #[serde(default)] to not emit a warning - "default" => { - use syn::Token; - if input.peek(Token![=]) { - parse_assign_str(input)?; - } - }, - "with" => { - parse_assign_str(input)?; - out.0.using_serde_with = true; - }, - } -} - -fn replace_underscore(ty: &mut Type, with: &Type) { - match ty { - Type::Infer(_) => *ty = with.clone(), - Type::Array(TypeArray { elem, .. }) - | Type::Group(TypeGroup { elem, .. }) - | Type::Paren(TypeParen { elem, .. }) - | Type::Ptr(TypePtr { elem, .. }) - | Type::Reference(TypeReference { elem, .. }) - | Type::Slice(TypeSlice { elem, .. }) => { - replace_underscore(elem, with); - } - Type::Tuple(TypeTuple { elems, .. }) => { - for elem in elems { - replace_underscore(elem, with); - } - } - Type::Path(TypePath { path, qself }) => { - if let Some(QSelf { ty, .. 
}) = qself { - replace_underscore(ty, with); - } - - for segment in &mut path.segments { - match &mut segment.arguments { - PathArguments::None => (), - PathArguments::AngleBracketed(a) => { - replace_underscore_in_angle_bracketed(a, with); - } - PathArguments::Parenthesized(p) => { - for input in &mut p.inputs { - replace_underscore(input, with); - } - if let ReturnType::Type(_, output) = &mut p.output { - replace_underscore(output, with); - } - } - } - } - } - _ => (), - } -} - -fn replace_underscore_in_angle_bracketed(args: &mut AngleBracketedGenericArguments, with: &Type) { - for arg in &mut args.args { - match arg { - GenericArgument::Type(ty) => { - replace_underscore(ty, with); - } - GenericArgument::AssocType(assoc_ty) => { - replace_underscore(&mut assoc_ty.ty, with); - for g in &mut assoc_ty.generics { - replace_underscore_in_angle_bracketed(g, with); - } - } - _ => (), - } - } -} +use syn::{ + AngleBracketedGenericArguments, Attribute, Field, GenericArgument, Ident, PathArguments, QSelf, + Result, ReturnType, Type, TypeArray, TypeGroup, TypeParen, TypePath, TypePtr, TypeReference, + TypeSlice, TypeTuple, +}; + +use super::{parse_assign_from_str, parse_assign_str, Attr, Serde}; +use crate::utils::{parse_attrs, parse_docs}; + +#[derive(Default)] +pub struct FieldAttr { + type_as: Option, + pub type_override: Option, + pub rename: Option, + pub inline: bool, + pub skip: bool, + pub optional: Optional, + pub flatten: bool, + pub docs: String, + + pub using_serde_with: bool, +} + +/// Indicates whether the field is marked with `#[ts(optional)]`. +/// `#[ts(optional)]` turns an `t: Option` into `t?: T`, while +/// `#[ts(optional = nullable)]` turns it into `t?: T | null`. +#[derive(Default)] +pub struct Optional { + pub optional: bool, + pub nullable: bool, +} + +impl FieldAttr { + pub fn from_attrs(attrs: &[Attribute]) -> Result { + let mut result = parse_attrs::(attrs)?; + + if cfg!(feature = "serde-compat") && !result.skip { + let serde_attr = crate::utils::parse_serde_attrs::(attrs); + result = result.merge(serde_attr.0); + } + + result.docs = parse_docs(attrs)?; + + Ok(result) + } + + pub fn type_as(&self, original_type: &Type) -> Type { + if let Some(mut ty) = self.type_as.clone() { + replace_underscore(&mut ty, original_type); + ty + } else { + original_type.clone() + } + } +} + +impl Attr for FieldAttr { + type Item = Field; + + fn merge(self, other: Self) -> Self { + Self { + type_as: self.type_as.or(other.type_as), + type_override: self.type_override.or(other.type_override), + rename: self.rename.or(other.rename), + inline: self.inline || other.inline, + skip: self.skip || other.skip, + optional: Optional { + optional: self.optional.optional || other.optional.optional, + nullable: self.optional.nullable || other.optional.nullable, + }, + flatten: self.flatten || other.flatten, + + using_serde_with: self.using_serde_with || other.using_serde_with, + + // We can't emit TSDoc for a flattened field + // and we cant make this invalid in assert_validity because + // this documentation is totally valid in Rust + docs: if self.flatten || other.flatten { + String::new() + } else { + self.docs + &other.docs + }, + } + } + + fn assert_validity(&self, field: &Self::Item) -> Result<()> { + if cfg!(feature = "serde-compat") + && self.using_serde_with + && !(self.type_as.is_some() || self.type_override.is_some()) + { + syn_err_spanned!( + field; + r#"using `#[serde(with = "...")]` requires the use of `#[ts(as = "...")]` or `#[ts(type = "...")]`"# + ) + } + + if self.type_override.is_some() 
{ + if self.type_as.is_some() { + syn_err_spanned!(field; "`type` is not compatible with `as`") + } + + if self.inline { + syn_err_spanned!(field; "`type` is not compatible with `inline`") + } + + if self.flatten { + syn_err_spanned!( + field; + "`type` is not compatible with `flatten`" + ); + } + } + + if self.flatten { + if self.type_as.is_some() { + syn_err_spanned!( + field; + "`as` is not compatible with `flatten`" + ); + } + + if self.rename.is_some() { + syn_err_spanned!( + field; + "`rename` is not compatible with `flatten`" + ); + } + + if self.inline { + syn_err_spanned!( + field; + "`inline` is not compatible with `flatten`" + ); + } + + if self.optional.optional { + syn_err_spanned!( + field; + "`optional` is not compatible with `flatten`" + ); + } + } + + if field.ident.is_none() { + if self.flatten { + syn_err_spanned!( + field; + "`flatten` cannot with tuple struct fields" + ); + } + + if self.rename.is_some() { + syn_err_spanned!( + field; + "`flatten` cannot with tuple struct fields" + ); + } + + if self.optional.optional { + syn_err_spanned!( + field; + "`optional` cannot with tuple struct fields" + ); + } + } + + Ok(()) + } +} + +impl_parse! { + FieldAttr(input, out) { + "as" => out.type_as = Some(parse_assign_from_str(input)?), + "type" => out.type_override = Some(parse_assign_str(input)?), + "rename" => out.rename = Some(parse_assign_str(input)?), + "inline" => out.inline = true, + "skip" => out.skip = true, + "optional" => { + use syn::{Token, Error}; + let nullable = if input.peek(Token![=]) { + input.parse::()?; + let span = input.span(); + match Ident::parse(input)?.to_string().as_str() { + "nullable" => true, + _ => Err(Error::new(span, "expected 'nullable'"))? + } + } else { + false + }; + out.optional = Optional { + optional: true, + nullable, + } + }, + "flatten" => out.flatten = true, + } +} + +impl_parse! { + Serde(input, out) { + "rename" => out.0.rename = Some(parse_assign_str(input)?), + "skip" => out.0.skip = true, + "flatten" => out.0.flatten = true, + // parse #[serde(default)] to not emit a warning + "default" => { + use syn::Token; + if input.peek(Token![=]) { + parse_assign_str(input)?; + } + }, + "with" => { + parse_assign_str(input)?; + out.0.using_serde_with = true; + }, + } +} + +fn replace_underscore(ty: &mut Type, with: &Type) { + match ty { + Type::Infer(_) => *ty = with.clone(), + Type::Array(TypeArray { elem, .. }) + | Type::Group(TypeGroup { elem, .. }) + | Type::Paren(TypeParen { elem, .. }) + | Type::Ptr(TypePtr { elem, .. }) + | Type::Reference(TypeReference { elem, .. }) + | Type::Slice(TypeSlice { elem, .. }) => { + replace_underscore(elem, with); + } + Type::Tuple(TypeTuple { elems, .. }) => { + for elem in elems { + replace_underscore(elem, with); + } + } + Type::Path(TypePath { path, qself }) => { + if let Some(QSelf { ty, .. 
}) = qself { + replace_underscore(ty, with); + } + + for segment in &mut path.segments { + match &mut segment.arguments { + PathArguments::None => (), + PathArguments::AngleBracketed(a) => { + replace_underscore_in_angle_bracketed(a, with); + } + PathArguments::Parenthesized(p) => { + for input in &mut p.inputs { + replace_underscore(input, with); + } + if let ReturnType::Type(_, output) = &mut p.output { + replace_underscore(output, with); + } + } + } + } + } + _ => (), + } +} + +fn replace_underscore_in_angle_bracketed(args: &mut AngleBracketedGenericArguments, with: &Type) { + for arg in &mut args.args { + match arg { + GenericArgument::Type(ty) => { + replace_underscore(ty, with); + } + GenericArgument::AssocType(assoc_ty) => { + replace_underscore(&mut assoc_ty.ty, with); + if let Some(g) = &mut assoc_ty.generics { + replace_underscore_in_angle_bracketed(g, with); + } + } + _ => (), + } + } +} diff --git a/ts-rs/Cargo.toml b/ts-rs/Cargo.toml index ff6ed588..1b994ec1 100644 --- a/ts-rs/Cargo.toml +++ b/ts-rs/Cargo.toml @@ -57,4 +57,5 @@ thiserror = "1" indexmap = { version = "2", optional = true } ordered-float = { version = ">= 3, < 5", optional = true } serde_json = { version = "1", optional = true } +lazy_static = { version = "1", default-features = false } diff --git a/ts-rs/src/export.rs b/ts-rs/src/export.rs index cfcee040..0b562e50 100644 --- a/ts-rs/src/export.rs +++ b/ts-rs/src/export.rs @@ -1,36 +1,30 @@ use std::{ any::TypeId, borrow::Cow, - collections::BTreeMap, + collections::{BTreeMap, BTreeSet, HashMap, HashSet}, fmt::Write, fs::File, + io::{Seek, SeekFrom}, path::{Component, Path, PathBuf}, sync::Mutex, }; +pub use error::ExportError; +use lazy_static::lazy_static; +use path::diff_paths; pub(crate) use recursive_export::export_all_into; -use thiserror::Error; use crate::TS; +mod error; mod path; -const NOTE: &str = "// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.\n"; - -/// An error which may occur when exporting a type -#[derive(Error, Debug)] -pub enum ExportError { - #[error("this type cannot be exported")] - CannotBeExported(&'static str), - #[cfg(feature = "format")] - #[error("an error occurred while formatting the generated typescript output")] - Formatting(String), - #[error("an error occurred while performing IO")] - Io(#[from] std::io::Error), - #[error("the environment variable CARGO_MANIFEST_DIR is not set")] - ManifestDirNotSet, +lazy_static! { + static ref EXPORT_PATHS: Mutex>> = Mutex::new(HashMap::new()); } +const NOTE: &str = "// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.\n"; + mod recursive_export { use std::{any::TypeId, collections::HashSet, path::Path}; @@ -108,10 +102,8 @@ pub(crate) fn export_into( pub(crate) fn export_to>( path: P, ) -> Result<(), ExportError> { - // Lock to make sure only one file will be written at a time. - // In the future, it might make sense to replace this with something more clever to only prevent - // two threads from writing the **same** file concurrently. - static FILE_LOCK: Mutex<()> = Mutex::new(()); + let path = path.as_ref().to_owned(); + let type_name = T::ident(); #[allow(unused_mut)] let mut buffer = export_to_string::()?; @@ -129,23 +121,143 @@ pub(crate) fn export_to>( } } - if let Some(parent) = path.as_ref().parent() { + if let Some(parent) = path.parent() { std::fs::create_dir_all(parent)?; } - let lock = FILE_LOCK.lock().unwrap(); - { - // Manually write to file & call `sync_data`. 
Otherwise, calling `fs::read(path)` - // immediately after `T::export()` might result in an empty file. - use std::io::Write; - let mut file = File::create(path)?; - file.write_all(buffer.as_bytes())?; - file.sync_data()?; + + export_and_merge(path, type_name, buffer)?; + + Ok(()) +} + +/// Exports the type to a new file if the file hasn't yet been written to. +/// Otherwise, finds its place in the already existing file and inserts it. +fn export_and_merge( + path: PathBuf, + type_name: String, + generated_type: String, +) -> Result<(), ExportError> { + use std::io::{Read, Write}; + + let mut lock = EXPORT_PATHS.lock().unwrap(); + + let Some(entry) = lock.get_mut(&path) else { + // The file hasn't been written to yet, so it must be + // overwritten + let mut file = File::create(&path)?; + file.write_all(generated_type.as_bytes())?; + file.sync_all()?; + + let mut set = HashSet::new(); + set.insert(type_name); + lock.insert(path, set); + + return Ok(()); + }; + + if entry.contains(&type_name) { + return Ok(()); } - drop(lock); + let mut file = std::fs::OpenOptions::new() + .read(true) + .write(true) + .open(&path)?; + + let file_len = file.metadata()?.len(); + + let mut original_contents = String::with_capacity(file_len as usize); + file.read_to_string(&mut original_contents)?; + + let buffer = merge(original_contents, generated_type); + + file.seek(SeekFrom::Start(NOTE.len() as u64))?; + + file.write_all(buffer.as_bytes())?; + file.sync_all()?; + + entry.insert(type_name); + Ok(()) } +const HEADER_ERROR_MESSAGE: &'static str = "The generated strings must have their NOTE and imports separated from their type declarations by a new line"; + +const DECLARATION_START: &'static str = "export type "; + +/// Inserts the imports and declaration from the newly generated type +/// into the contents of the file, removimg duplicate imports and organazing +/// both imports and declarations alphabetically +fn merge(original_contents: String, new_contents: String) -> String { + let (original_header, original_decls) = original_contents + .split_once("\n\n") + .expect(HEADER_ERROR_MESSAGE); + let (new_header, new_decl) = new_contents.split_once("\n\n").expect(HEADER_ERROR_MESSAGE); + + let imports = original_header + .lines() + .skip(1) + .chain(new_header.lines().skip(1)) + .collect::>(); + + let import_len = imports.iter().map(|&x| x.len()).sum::() + imports.len(); + let capacity = import_len + original_decls.len() + new_decl.len() + 2; + + let mut buffer = String::with_capacity(capacity); + + for import in imports { + buffer.push_str(import); + buffer.push('\n') + } + + let new_decl = new_decl.trim_matches('\n'); + + let new_decl_name = new_decl + .split(DECLARATION_START) + .last() + .unwrap() + .split_whitespace() + .next() + .unwrap(); + + let original_decls = original_decls.split("\n\n").map(|x| x.trim_matches('\n')); + + let mut inserted = false; + for decl in original_decls { + let decl_name = decl + .split(DECLARATION_START) + .last() + .unwrap() + .split_whitespace() + .next() + .unwrap(); + + if inserted || decl_name < new_decl_name { + buffer.push('\n'); + buffer.push_str(decl); + buffer.push('\n'); + } else { + buffer.push('\n'); + buffer.push_str(new_decl); + buffer.push('\n'); + + buffer.push('\n'); + buffer.push_str(decl); + buffer.push('\n'); + + inserted = true; + } + } + + if !inserted { + buffer.push('\n'); + buffer.push_str(new_decl); + buffer.push('\n'); + } + + buffer +} + /// Returns the generated definition for `T`. 
pub(crate) fn export_to_string() -> Result { let mut buffer = String::with_capacity(1024); @@ -183,8 +295,8 @@ fn generate_imports( ) -> Result<(), ExportError> { let path = T::output_path() .ok_or_else(std::any::type_name::) + .map(|x| out_dir.as_ref().join(x)) .map_err(ExportError::CannotBeExported)?; - let path = out_dir.as_ref().join(path); let deps = T::dependencies(); let deduplicated_deps = deps @@ -195,22 +307,33 @@ fn generate_imports( for (_, dep) in deduplicated_deps { let dep_path = out_dir.as_ref().join(dep.output_path); - let rel_path = import_path(&path, &dep_path); + let rel_path = import_path(&path, &dep_path)?; + + let is_same_file = path + .file_name() + .and_then(std::ffi::OsStr::to_str) + .map(|x| x.trim_end_matches(".ts")) + .map(|x| format!("./{x}")) + .map(|x| x == rel_path.trim_end_matches(".js")) + .unwrap_or(false); + + if is_same_file { + continue; + } + writeln!( out, "import type {{ {} }} from {:?};", &dep.ts_name, rel_path - ) - .unwrap(); + )?; } - writeln!(out).unwrap(); + writeln!(out)?; Ok(()) } /// Returns the required import path for importing `import` from the file `from` -fn import_path(from: &Path, import: &Path) -> String { - let rel_path = - diff_paths(import, from.parent().unwrap()).expect("failed to calculate import path"); +fn import_path(from: &Path, import: &Path) -> Result { + let rel_path = diff_paths(import, from.parent().unwrap())?; let path = match rel_path.components().next() { Some(Component::Normal(_)) => format!("./{}", rel_path.to_string_lossy()), _ => rel_path.to_string_lossy().into(), @@ -218,65 +341,9 @@ fn import_path(from: &Path, import: &Path) -> String { let path_without_extension = path.trim_end_matches(".ts"); - if cfg!(feature = "import-esm") { + Ok(if cfg!(feature = "import-esm") { format!("{}.js", path_without_extension) } else { path_without_extension.to_owned() - } -} - -// Construct a relative path from a provided base directory path to the provided path. -// -// Copyright 2012-2015 The Rust Project Developers. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. -// -// Adapted from rustc's path_relative_from -// https://github.com/rust-lang/rust/blob/e1d0de82cc40b666b88d4a6d2c9dcbc81d7ed27f/src/librustc_back/rpath.rs#L116-L158 -fn diff_paths(path: P, base: B) -> Result -where - P: AsRef, - B: AsRef, -{ - use Component as C; - - let path = path::absolute(path)?; - let base = path::absolute(base)?; - - let mut ita = path.components(); - let mut itb = base.components(); - let mut comps: Vec = vec![]; - - loop { - match (ita.next(), itb.next()) { - (Some(C::ParentDir | C::CurDir), _) | (_, Some(C::ParentDir | C::CurDir)) => { - unreachable!( - "The paths have been cleaned, no no '.' or '..' 
components are present" - ) - } - (None, None) => break, - (Some(a), None) => { - comps.push(a); - comps.extend(ita.by_ref()); - break; - } - (None, _) => comps.push(Component::ParentDir), - (Some(a), Some(b)) if comps.is_empty() && a == b => (), - (Some(a), Some(_)) => { - comps.push(Component::ParentDir); - for _ in itb { - comps.push(Component::ParentDir); - } - comps.push(a); - comps.extend(ita.by_ref()); - break; - } - } - } - - Ok(comps.iter().map(|c| c.as_os_str()).collect()) + }) } diff --git a/ts-rs/src/export/error.rs b/ts-rs/src/export/error.rs new file mode 100644 index 00000000..f6068802 --- /dev/null +++ b/ts-rs/src/export/error.rs @@ -0,0 +1,15 @@ +/// An error which may occur when exporting a type +#[derive(thiserror::Error, Debug)] +pub enum ExportError { + #[error("this type cannot be exported")] + CannotBeExported(&'static str), + #[cfg(feature = "format")] + #[error("an error occurred while formatting the generated typescript output")] + Formatting(String), + #[error("an error occurred while performing IO")] + Io(#[from] std::io::Error), + #[error("the environment variable CARGO_MANIFEST_DIR is not set")] + ManifestDirNotSet, + #[error("an error occurred while writing to a formatted buffer")] + Fmt(#[from] std::fmt::Error), +} diff --git a/ts-rs/src/export/path.rs b/ts-rs/src/export/path.rs index fdfc0b14..f43b8c7f 100644 --- a/ts-rs/src/export/path.rs +++ b/ts-rs/src/export/path.rs @@ -3,6 +3,7 @@ use std::path::{Component as C, Path, PathBuf}; use super::ExportError as E; const ERROR_MESSAGE: &str = r#"The path provided with `#[ts(export_to = "..")]` is not valid"#; + pub fn absolute>(path: T) -> Result { let path = std::env::current_dir()?.join(path.as_ref()); @@ -23,3 +24,57 @@ pub fn absolute>(path: T) -> Result { PathBuf::from(".") }) } + +// Construct a relative path from a provided base directory path to the provided path. +// +// Copyright 2012-2015 The Rust Project Developers. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +// +// Adapted from rustc's path_relative_from +// https://github.com/rust-lang/rust/blob/e1d0de82cc40b666b88d4a6d2c9dcbc81d7ed27f/src/librustc_back/rpath.rs#L116-L158 +pub(super) fn diff_paths(path: P, base: B) -> Result +where + P: AsRef, + B: AsRef, +{ + let path = absolute(path)?; + let base = absolute(base)?; + + let mut ita = path.components(); + let mut itb = base.components(); + let mut comps: Vec = vec![]; + + loop { + match (ita.next(), itb.next()) { + (Some(C::ParentDir | C::CurDir), _) | (_, Some(C::ParentDir | C::CurDir)) => { + unreachable!( + "The paths have been cleaned, no no '.' or '..' 
components are present" + ) + } + (None, None) => break, + (Some(a), None) => { + comps.push(a); + comps.extend(ita.by_ref()); + break; + } + (None, _) => comps.push(C::ParentDir), + (Some(a), Some(b)) if comps.is_empty() && a == b => (), + (Some(a), Some(_)) => { + comps.push(C::ParentDir); + for _ in itb { + comps.push(C::ParentDir); + } + comps.push(a); + comps.extend(ita.by_ref()); + break; + } + } + } + + Ok(comps.iter().map(|c| c.as_os_str()).collect()) +} diff --git a/ts-rs/tests/integration/main.rs b/ts-rs/tests/integration/main.rs index ee88b59a..1bcecf0b 100644 --- a/ts-rs/tests/integration/main.rs +++ b/ts-rs/tests/integration/main.rs @@ -39,6 +39,7 @@ mod ranges; mod raw_idents; mod recursion_limit; mod references; +mod same_file_export; mod self_referential; mod semver; mod serde_json; diff --git a/ts-rs/tests/integration/same_file_export.rs b/ts-rs/tests/integration/same_file_export.rs new file mode 100644 index 00000000..3a9eaeec --- /dev/null +++ b/ts-rs/tests/integration/same_file_export.rs @@ -0,0 +1,33 @@ +use ts_rs::TS; + +#[derive(TS)] +#[ts(export, export_to = "same_file_export/")] +struct DepA { + foo: i32, +} + +#[derive(TS)] +#[ts(export, export_to = "same_file_export/")] +struct DepB { + foo: i32, +} + +#[derive(TS)] +#[ts(export, export_to = "same_file_export/types.ts")] +struct A { + foo: DepA, +} + +#[derive(TS)] +#[ts(export, export_to = "same_file_export/types.ts")] +struct B { + foo: DepB, +} + +#[derive(TS)] +#[ts(export, export_to = "same_file_export/types.ts")] +struct C { + foo: DepA, + bar: DepB, + biz: B, +}