Skip to content

Commit

Permalink
chore: rename again
Browse files Browse the repository at this point in the history
  • Loading branch information
dark-flames committed Aug 18, 2024
1 parent 5e65c42 commit de5f3ee
Show file tree
Hide file tree
Showing 10 changed files with 25 additions and 25 deletions.
6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[package]
name = "quote-it"
name = "quote-data"
version = "1.0.0"
authors = ["dark-flames <[email protected]>"]
edition = "2018"
Expand All @@ -24,8 +24,8 @@ syn = "2.0.75"
heck = "0.5.0"
quote = { version = "1.0.36", optional = true }
proc-macro2 = {version = "1.0.86", optional = true}
derive = { package = "quote-it-codegen", version = "1.0.0", path = "derive" }
helpers = { package = "quote-it-helpers", version = "1.0.0", path = "helpers" }
derive = { package = "quote-data-codegen", version = "1.0.0", path = "derive" }
helpers = { package = "quote-data-helpers", version = "1.0.0", path = "helpers" }


[features]
Expand Down
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,16 +1,16 @@
# quote-it
# quote-data
A tokenization library for Rust.

## Usage
`quote-it` provide derive macro `quote_it::QuoteIt`,
`quote-data` provides the derive macro `quote_data::QuoteIt`,
which implements `quote::ToTokens` for struct or enum.

```rust
use quote_it::ToTokens;
use quote_data::QuoteIt;
use proc_macro2::TokenStream;
use quote::quote;

#[derive(ToTokens)]
#[derive(QuoteIt)]
struct Foo {
a: i32,
b: i64
Expand Down
4 changes: 2 additions & 2 deletions derive/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[package]
name = "quote-it-codegen"
name = "quote-data-codegen"
version = "1.0.0"
authors = ["dark-flames <[email protected]>"]
edition = "2018"
Expand All @@ -16,7 +16,7 @@ syn = "2.0.75"
heck = "0.5.0"
quote = "1.0.36"
proc-macro2 = "1.0.86"
helpers = { package = "quote-it-helpers", version = "1.0.0", path = "../helpers" }
helpers = { package = "quote-data-helpers", version = "1.0.0", path = "../helpers" }

[lib]
proc-macro=true
2 changes: 1 addition & 1 deletion derive/src/enum.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ impl EnumStructure {
Ok(quote! {
impl<#generics> quote::ToTokens for #name <#generics_without_bounds> #where_clause {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
use quote_it::Tokenizable;
use quote_data::Tokenizable;
match self {
#(#variants),*
}.to_tokens(tokens);
Expand Down
6 changes: 3 additions & 3 deletions derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,19 +25,19 @@ pub fn derive_to_tokens(input: TokenStream) -> TokenStream {
Expr::Lit(path_lit) => match &path_lit.lit {
Lit::Str(path_str) => Ok(path_str.value()),
_ => Err(Error::new_spanned(
&mod_path,
mod_path,
"`mod_path` must be a string",
))
},
_ => Err(Error::new_spanned(
&mod_path,
mod_path,
"`mod_path` must be a string",
)),
}
.map(|path| {
TokenStream2::from_str(path.as_str()).map_err(|_| {
Error::new_spanned(
&mod_path,
mod_path,
"Value of `mod_path` must be a path of mod",
)
})
Expand Down
2 changes: 1 addition & 1 deletion derive/src/struct.rs
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ impl StructStructure {

impl<#generics> quote::ToTokens for #name <#generics_without_bounds> #where_clause {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
use quote_it::Tokenizable;
use quote_data::Tokenizable;
#(#temp_values;)*

(quote::quote! {
Expand Down
2 changes: 1 addition & 1 deletion helpers/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[package]
name = "quote-it-helpers"
name = "quote-data-helpers"
version = "1.0.0"
authors = ["dark-flames <[email protected]>"]
edition = "2018"
Expand Down
16 changes: 8 additions & 8 deletions helpers/src/tokenizable.rs
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ impl<T: ToTokens + Clone> Tokenizable for TokenizableVec<T> {
)?;

Ok(Some(quote::quote! {
quote_it::TokenizableVec::from_value(#value_path.iter().map(
quote_data::TokenizableVec::from_value(#value_path.iter().map(
|item| #wrapped_value
).collect())
}))
Expand Down Expand Up @@ -154,7 +154,7 @@ impl Tokenizable for TokenizableString {
}

Ok(Some(quote::quote! {
quote_it::TokenizableString::from_value(#value_path.clone())
quote_data::TokenizableString::from_value(#value_path.clone())
}))
} else {
Ok(None)
Expand Down Expand Up @@ -220,7 +220,7 @@ impl<T: ToTokens + Clone> Tokenizable for TokenizableOption<T> {
)?;

Ok(Some(quote::quote! {
quote_it::TokenizableOption::from_value(#value_path.as_ref().map(|option_value| #wrapped_value))
quote_data::TokenizableOption::from_value(#value_path.as_ref().map(|option_value| #wrapped_value))
}))
} else {
Ok(None)
Expand Down Expand Up @@ -305,7 +305,7 @@ impl<T, E> Tokenizable for TokenizableResult<T, E>
)?;

Ok(Some(quote::quote! {
quote_it::TokenizableResult::from_value(
quote_data::TokenizableResult::from_value(
#value_path.clone()
.map(|result| #first_wrapped_value)
.map_err(|error| #second_wrapped_value)
Expand Down Expand Up @@ -403,7 +403,7 @@ impl<K, V> Tokenizable for TokenizableHashMap<K, V>
)?;

Ok(Some(quote::quote! {
quote_it::TokenizableHashMap::from_value(
quote_data::TokenizableHashMap::from_value(
#value_path.iter().map(
|(key, value)| (#first_wrapped_value, #second_wrapped_value)
).collect()
Expand Down Expand Up @@ -476,7 +476,7 @@ impl<T> Tokenizable for TokenizableHashSet<T>
)?;

Ok(Some(quote::quote! {
quote_it::TokenizableHashSet::from_value(#value_path.iter().map(
quote_data::TokenizableHashSet::from_value(#value_path.iter().map(
|item| #wrapped_value
).collect())
}))
Expand Down Expand Up @@ -546,7 +546,7 @@ impl <A, B> Tokenizable for TokenizablePair<A, B>
)?;

Ok(Some(quote::quote! {
quote_it::TokenizablePair::from_value((#first, #second))
quote_data::TokenizablePair::from_value((#first, #second))
}))
} else {
Ok(None)
Expand Down Expand Up @@ -591,7 +591,7 @@ impl Tokenizable for TokenizablePhantomData {
}

Ok(Some(quote::quote! {
quote_it::TokenizablePhantomData::from_value(())
quote_data::TokenizablePhantomData::from_value(())
}))
} else {
Ok(None)
Expand Down
2 changes: 1 addition & 1 deletion tests/test_enum.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use quote_it::QuoteIt;
use quote_data::QuoteIt;
use quote::ToTokens;
use std::marker::PhantomData;

Expand Down
2 changes: 1 addition & 1 deletion tests/test_struct.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use helpers::TokenizableError;
use quote_it::QuoteIt;
use quote_data::QuoteIt;
use quote::ToTokens;
use std::collections::{HashMap, HashSet};
use std::marker::PhantomData;
Expand Down

0 comments on commit de5f3ee

Please sign in to comment.