+140
| use crate::error::Result; | ||
| use crate::segment::{self, Segment}; | ||
| use proc_macro::{Delimiter, Group, Span, TokenStream, TokenTree}; | ||
| use std::iter; | ||
| use std::mem; | ||
| use std::str::FromStr; | ||
/// Expands paste segments inside an attribute's token stream.
///
/// Two attribute shapes are handled:
/// - name-value form `name = "lit" suffix ...`: pasted into a single string
///   literal when the value consists of more than one string-like token;
/// - list form `name(...)`: each comma-separated element is expanded
///   recursively.
///
/// Any stream not matching one of these shapes is returned unchanged.
/// `contains_paste` is set to true only when some pasting actually happened,
/// so the caller can avoid rebuilding unchanged streams.
pub fn expand_attr(
    attr: TokenStream,
    span: Span,
    contains_paste: &mut bool,
) -> Result<TokenStream> {
    let mut tokens = attr.clone().into_iter();
    // An attribute we can expand must begin with an identifier (its name).
    match tokens.next() {
        Some(TokenTree::Ident(..)) => {}
        _ => return Ok(attr),
    }

    let group = match tokens.next() {
        Some(TokenTree::Punct(ref punct)) if punct.as_char() == '=' => {
            // Name-value form. Paste only when every remaining token is
            // string-like and there is more than one of them (a single token
            // needs no pasting). `all` short-circuits, but a stream with a
            // non-string-like token fails the condition anyway.
            let mut count = 0;
            if tokens.inspect(|_| count += 1).all(|tt| is_stringlike(&tt)) && count > 1 {
                *contains_paste = true;
                return do_paste_name_value_attr(attr, span);
            }
            return Ok(attr);
        }
        Some(TokenTree::Group(group)) => group,
        _ => return Ok(attr),
    };

    // Only the parenthesized list form `name(...)` is expanded.
    if group.delimiter() != Delimiter::Parenthesis {
        return Ok(attr);
    }

    // There can't be anything else after the first group in a valid attribute.
    if tokens.next().is_some() {
        return Ok(attr);
    }

    // Recursively expand each comma-separated element of the list, keeping
    // the commas in place.
    let mut group_contains_paste = false;
    let mut expanded = TokenStream::new();
    let mut nested_attr = TokenStream::new();
    for tt in group.stream().into_iter() {
        match &tt {
            TokenTree::Punct(punct) if punct.as_char() == ',' => {
                expanded.extend(expand_attr(
                    nested_attr,
                    group.span(),
                    &mut group_contains_paste,
                )?);
                expanded.extend(iter::once(tt));
                nested_attr = TokenStream::new();
            }
            _ => nested_attr.extend(iter::once(tt)),
        }
    }
    // Element after the last comma (or the only element).
    if !nested_attr.is_empty() {
        expanded.extend(expand_attr(
            nested_attr,
            group.span(),
            &mut group_contains_paste,
        )?);
    }

    if group_contains_paste {
        *contains_paste = true;
        let mut group = Group::new(Delimiter::Parenthesis, expanded);
        group.set_span(span);
        Ok(attr
            .into_iter()
            // Just keep the initial ident in `#[ident(...)]`.
            .take(1)
            .chain(iter::once(TokenTree::Group(group)))
            .collect())
    } else {
        Ok(attr)
    }
}
/// Pastes the value of a name-value attribute such as
/// `doc = "prefix " name "suffix"` into one string literal.
///
/// The first two tokens (the attribute name and `=`) are copied through
/// untouched; the remaining tokens are parsed as paste segments and
/// concatenated into a single `"..."` literal carrying `span`.
fn do_paste_name_value_attr(attr: TokenStream, span: Span) -> Result<TokenStream> {
    let mut expanded = TokenStream::new();
    let mut tokens = attr.into_iter().peekable();
    expanded.extend(tokens.by_ref().take(2)); // `doc =`

    let mut segments = segment::parse(&mut tokens)?;

    // Unquote every string segment so the pieces can be concatenated and
    // re-quoted once at the end.
    for segment in &mut segments {
        if let Segment::String(string) = segment {
            if let Some(open_quote) = string.value.find('"') {
                if open_quote == 0 {
                    // Ordinary string literal `"..."`: strip the surrounding
                    // quotes; the contents are already escaped source text.
                    string.value.truncate(string.value.len() - 1);
                    string.value.remove(0);
                } else {
                    // Raw string literal, e.g. `r#"..."#`: take what lies
                    // between the first and last quote and re-escape it for
                    // a non-raw literal.
                    let begin = open_quote + 1;
                    let end = string.value.rfind('"').unwrap();
                    let raw_string = mem::replace(&mut string.value, String::new());
                    for ch in raw_string[begin..end].chars() {
                        string.value.extend(ch.escape_default());
                    }
                }
            }
        }
    }

    // Concatenate the segments and wrap the result back into a single
    // string-literal token with the requested span.
    let mut lit = segment::paste(&segments)?;
    lit.insert(0, '"');
    lit.push('"');
    let mut lit = TokenStream::from_str(&lit)
        .unwrap()
        .into_iter()
        .next()
        .unwrap();
    lit.set_span(span);
    expanded.extend(iter::once(lit));
    Ok(expanded)
}
| fn is_stringlike(token: &TokenTree) -> bool { | ||
| match token { | ||
| TokenTree::Ident(_) => true, | ||
| TokenTree::Literal(literal) => { | ||
| let repr = literal.to_string(); | ||
| !repr.starts_with('b') && !repr.starts_with('\'') | ||
| } | ||
| TokenTree::Group(group) => { | ||
| if group.delimiter() != Delimiter::None { | ||
| return false; | ||
| } | ||
| let mut inner = group.stream().into_iter(); | ||
| match inner.next() { | ||
| Some(first) => inner.next().is_none() && is_stringlike(&first), | ||
| None => false, | ||
| } | ||
| } | ||
| TokenTree::Punct(punct) => punct.as_char() == '\'' || punct.as_char() == ':', | ||
| } | ||
| } |
| use paste::paste; | ||
#[test]
fn test_paste_cfg() {
    // Regression test: pasting inside a `#[cfg(...)]` attribute. Both pasted
    // values (`"protocol_feature_" $ret:snake` and `"" $width`) must collapse
    // into single string literals before cfg evaluation, or the attribute is
    // malformed.
    macro_rules! m {
        ($ret:ident, $width:expr) => {
            paste! {
                #[cfg(any(feature = "protocol_feature_" $ret:snake, target_pointer_width = "" $width))]
                fn new() -> $ret { todo!() }
            }
        };
    }

    struct Paste;

    // Exactly one of the two invocations matches the build target's pointer
    // width, so `new` ends up defined exactly once.
    #[cfg(target_pointer_width = "64")]
    m!(Paste, 64);
    #[cfg(target_pointer_width = "32")]
    m!(Paste, 32);

    // Referencing `new` proves the cfg'd item was actually emitted.
    let _ = new;
}
| { | ||
| "git": { | ||
| "sha1": "6a5265f7a937412fb1da72fb72fd32bbaffecebc", | ||
| "sha1": "a3e4ace7092f5f0b750efe22fe8c4b65e8495d94" | ||
| } | ||
| } |
+1
-1
@@ -16,3 +16,3 @@ # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO | ||
| name = "paste" | ||
| version = "1.0.2" | ||
| version = "1.0.3" | ||
| authors = ["David Tolnay <dtolnay@gmail.com>"] | ||
@@ -19,0 +19,0 @@ description = "Macros for all your token pasting needs" |
+11
-12
@@ -143,7 +143,7 @@ //! [![github]](https://github.com/dtolnay/paste) [![crates-io]](https://crates.io/crates/paste) [![docs-rs]](https://docs.rs/paste) | ||
| mod doc; | ||
| mod attr; | ||
| mod error; | ||
| mod segment; | ||
| use crate::doc::{do_paste_doc, is_pasted_doc}; | ||
| use crate::attr::expand_attr; | ||
| use crate::error::{Error, Result}; | ||
@@ -210,14 +210,13 @@ use crate::segment::Segment; | ||
| *contains_paste = true; | ||
| } else if delimiter == Delimiter::Bracket | ||
| && (lookbehind == Lookbehind::Pound || lookbehind == Lookbehind::PoundBang) | ||
| && is_pasted_doc(&content) | ||
| { | ||
| let pasted = do_paste_doc(&content, span)?; | ||
| let mut group = Group::new(delimiter, pasted); | ||
| group.set_span(span); | ||
| expanded.extend(iter::once(TokenTree::Group(group))); | ||
| *contains_paste = true; | ||
| } else { | ||
| let mut group_contains_paste = false; | ||
| let nested = expand(content, &mut group_contains_paste)?; | ||
| let nested = match delimiter { | ||
| Delimiter::Bracket if lookbehind == Lookbehind::Pound => { | ||
| expand_attr(content, span, &mut group_contains_paste)? | ||
| } | ||
| Delimiter::Bracket if lookbehind == Lookbehind::PoundBang => { | ||
| expand_attr(content, span, &mut group_contains_paste)? | ||
| } | ||
| _ => expand(content, &mut group_contains_paste)?, | ||
| }; | ||
| let group = if group_contains_paste { | ||
@@ -224,0 +223,0 @@ let mut group = Group::new(delimiter, nested); |
-90
| use crate::error::Result; | ||
| use crate::segment::{self, Segment}; | ||
| use proc_macro::{Delimiter, Span, TokenStream, TokenTree}; | ||
| use std::iter; | ||
| use std::mem; | ||
| use std::str::FromStr; | ||
| pub fn is_pasted_doc(input: &TokenStream) -> bool { | ||
| #[derive(PartialEq)] | ||
| enum State { | ||
| Init, | ||
| Doc, | ||
| Equal, | ||
| First, | ||
| Rest, | ||
| } | ||
| let mut state = State::Init; | ||
| for tt in input.clone() { | ||
| state = match (state, &tt) { | ||
| (State::Init, TokenTree::Ident(ident)) if ident.to_string() == "doc" => State::Doc, | ||
| (State::Doc, TokenTree::Punct(punct)) if punct.as_char() == '=' => State::Equal, | ||
| (State::Equal, tt) if is_stringlike(tt) => State::First, | ||
| (State::First, tt) | (State::Rest, tt) if is_stringlike(tt) => State::Rest, | ||
| _ => return false, | ||
| }; | ||
| } | ||
| state == State::Rest | ||
| } | ||
/// Pastes a `doc = "prefix " name "suffix"` attribute body into one string
/// literal.
///
/// The first two tokens (`doc =`) are copied through untouched; the
/// remaining tokens are parsed as paste segments and concatenated into a
/// single `"..."` literal carrying `span`.
pub fn do_paste_doc(attr: &TokenStream, span: Span) -> Result<TokenStream> {
    let mut expanded = TokenStream::new();
    let mut tokens = attr.clone().into_iter().peekable();
    expanded.extend(tokens.by_ref().take(2)); // `doc =`

    let mut segments = segment::parse(&mut tokens)?;

    // Unquote every string segment so the pieces can be concatenated and
    // re-quoted once at the end.
    for segment in &mut segments {
        if let Segment::String(string) = segment {
            if let Some(open_quote) = string.value.find('"') {
                if open_quote == 0 {
                    // Ordinary string literal `"..."`: strip the surrounding
                    // quotes; the contents are already escaped source text.
                    string.value.truncate(string.value.len() - 1);
                    string.value.remove(0);
                } else {
                    // Raw string literal, e.g. `r#"..."#`: take what lies
                    // between the first and last quote and re-escape it for
                    // a non-raw literal.
                    let begin = open_quote + 1;
                    let end = string.value.rfind('"').unwrap();
                    let raw_string = mem::replace(&mut string.value, String::new());
                    for ch in raw_string[begin..end].chars() {
                        string.value.extend(ch.escape_default());
                    }
                }
            }
        }
    }

    // Concatenate the segments and wrap the result back into a single
    // string-literal token with the requested span.
    let mut lit = segment::paste(&segments)?;
    lit.insert(0, '"');
    lit.push('"');
    let mut lit = TokenStream::from_str(&lit)
        .unwrap()
        .into_iter()
        .next()
        .unwrap();
    lit.set_span(span);
    expanded.extend(iter::once(lit));
    Ok(expanded)
}
| fn is_stringlike(token: &TokenTree) -> bool { | ||
| match token { | ||
| TokenTree::Ident(_) => true, | ||
| TokenTree::Literal(literal) => { | ||
| let repr = literal.to_string(); | ||
| !repr.starts_with('b') && !repr.starts_with('\'') | ||
| } | ||
| TokenTree::Group(group) => { | ||
| if group.delimiter() != Delimiter::None { | ||
| return false; | ||
| } | ||
| let mut inner = group.stream().into_iter(); | ||
| match inner.next() { | ||
| Some(first) => inner.next().is_none() && is_stringlike(&first), | ||
| None => false, | ||
| } | ||
| } | ||
| TokenTree::Punct(punct) => punct.as_char() == '\'' || punct.as_char() == ':', | ||
| } | ||
| } |
Sorry, the diff of this file is not supported yet