This repository has been archived by the owner on Oct 6, 2024. It is now read-only.

Commit

Merge pull request #48 from dtolnay/doc
Paste tokens inside of doc comments
dtolnay authored Jul 26, 2020
2 parents 66a9236 + d054eea commit f731b81
Showing 3 changed files with 155 additions and 11 deletions.
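In short, this change teaches `paste!` to concatenate the string-like tokens inside a `#[doc = ...]` attribute into a single doc string, so pasted identifiers and literals can appear in generated documentation. A minimal sketch of the new capability (the `make_new!` macro and `main` are illustrative, not part of the commit, and assume a paste version that includes this change):

use paste::paste;

macro_rules! make_new {
    ($ret:ident) => {
        paste! {
            // The two string pieces and the pasted identifier collapse into
            // one literal: "Create a new [`Paste`] object."
            #[doc = "Create a new [`" $ret "`] object."]
            pub fn new() -> $ret {
                $ret
            }
        }
    };
}

pub struct Paste;
make_new!(Paste);

fn main() {
    let _paste = new();
}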
src/doc.rs (88 additions, 0 deletions)
@@ -0,0 +1,88 @@
use proc_macro::{Delimiter, Span, TokenStream, TokenTree};
use std::iter;
use std::str::FromStr;

pub fn is_pasted_doc(input: &TokenStream) -> bool {
    #[derive(PartialEq)]
    enum State {
        Init,
        Doc,
        Equal,
        First,
        Rest,
    }

    let mut state = State::Init;
    for tt in input.clone() {
        state = match (state, &tt) {
            (State::Init, TokenTree::Ident(ident)) if ident.to_string() == "doc" => State::Doc,
            (State::Doc, TokenTree::Punct(punct)) if punct.as_char() == '=' => State::Equal,
            (State::Equal, tt) if is_stringlike(tt) => State::First,
            (State::First, tt) | (State::Rest, tt) if is_stringlike(tt) => State::Rest,
            _ => return false,
        };
    }

    state == State::Rest
}

pub fn do_paste_doc(attr: &TokenStream, span: Span) -> TokenStream {
    let mut expanded = TokenStream::new();
    let mut tokens = attr.clone().into_iter();
    expanded.extend(tokens.by_ref().take(2)); // `doc =`

    let mut lit = String::new();
    lit.push('"');
    for token in tokens {
        lit += &escaped_string_value(&token).unwrap();
    }
    lit.push('"');

    let mut lit = TokenStream::from_str(&lit)
        .unwrap()
        .into_iter()
        .next()
        .unwrap();
    lit.set_span(span);
    expanded.extend(iter::once(lit));
    expanded
}

fn is_stringlike(token: &TokenTree) -> bool {
    escaped_string_value(token).is_some()
}

fn escaped_string_value(token: &TokenTree) -> Option<String> {
    match token {
        TokenTree::Ident(ident) => Some(ident.to_string()),
        TokenTree::Literal(literal) => {
            let mut repr = literal.to_string();
            if repr.starts_with('b') || repr.starts_with('\'') {
                None
            } else if repr.starts_with('"') {
                repr.truncate(repr.len() - 1);
                repr.remove(0);
                Some(repr)
            } else if repr.starts_with('r') {
                let begin = repr.find('"').unwrap() + 1;
                let end = repr.rfind('"').unwrap();
                Some(repr[begin..end].escape_default().to_string())
            } else {
                Some(repr)
            }
        }
        TokenTree::Group(group) => {
            if group.delimiter() != Delimiter::None {
                return None;
            }
            let mut inner = group.stream().into_iter();
            let first = inner.next()?;
            if inner.next().is_none() {
                escaped_string_value(&first)
            } else {
                None
            }
        }
        TokenTree::Punct(_) => None,
    }
}
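A note on the `TokenTree::Group` arm above (an illustration, not part of the diff): macro_rules fragment captures such as `$x:expr` can reach the proc macro wrapped in an invisible `Delimiter::None` group, and that arm unwraps a single-token group so the capture still counts as string-like. A hedged sketch, reusing the `get_doc!` helper shape from the tests below; `doc_from_expr!` and `main` are illustrative names:

use paste::paste;

macro_rules! get_doc {
    (#[doc = $literal:tt]) => {
        $literal
    };
}

macro_rules! doc_from_expr {
    // A single-token expression capture may arrive inside an invisible
    // (None-delimited) group; escaped_string_value unwraps it, and a bare
    // literal is handled by the Literal arm either way.
    ($x:expr) => {
        paste! {
            get_doc!(#[doc = "value=" $x])
        }
    };
}

fn main() {
    assert_eq!(doc_from_expr!(42), "value=42");
}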
src/lib.rs (23 additions, 11 deletions)
@@ -138,8 +138,10 @@

 #![allow(clippy::needless_doctest_main)]

+mod doc;
 mod error;

+use crate::doc::{do_paste_doc, is_pasted_doc};
 use crate::error::{Error, Result};
 use proc_macro::{
     token_stream, Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree,
@@ -201,6 +203,15 @@ fn expand(input: TokenStream, contains_paste: &mut bool) -> Result<TokenStream>
                 } else if delimiter == Delimiter::None && is_flat_group(&content) {
                     expanded.extend(content);
                     *contains_paste = true;
+                } else if delimiter == Delimiter::Bracket
+                    && matches!(lookbehind, Lookbehind::Pound | Lookbehind::PoundBang)
+                    && is_pasted_doc(&content)
+                {
+                    let pasted = do_paste_doc(&content, span);
+                    let mut group = Group::new(delimiter, pasted);
+                    group.set_span(span);
+                    expanded.extend(iter::once(TokenTree::Group(group)));
+                    *contains_paste = true;
                 } else {
                     let mut group_contains_paste = false;
                     let nested = expand(content, &mut group_contains_paste)?;
@@ -223,19 +234,18 @@ fn expand(input: TokenStream, contains_paste: &mut bool) -> Result<TokenStream>
                 }
                 lookbehind = Lookbehind::Other;
             }
-            Some(other) => {
-                lookbehind = match &other {
-                    TokenTree::Punct(punct) if punct.as_char() == ':' => {
-                        if lookbehind == Lookbehind::JointColon {
-                            Lookbehind::DoubleColon
-                        } else if punct.spacing() == Spacing::Joint {
-                            Lookbehind::JointColon
-                        } else {
-                            Lookbehind::Other
-                        }
-                    }
+            Some(TokenTree::Punct(punct)) => {
+                lookbehind = match punct.as_char() {
+                    ':' if lookbehind == Lookbehind::JointColon => Lookbehind::DoubleColon,
+                    ':' if punct.spacing() == Spacing::Joint => Lookbehind::JointColon,
+                    '#' => Lookbehind::Pound,
+                    '!' if lookbehind == Lookbehind::Pound => Lookbehind::PoundBang,
                     _ => Lookbehind::Other,
                 };
+                expanded.extend(iter::once(TokenTree::Punct(punct)));
+            }
+            Some(other) => {
+                lookbehind = Lookbehind::Other;
                 expanded.extend(iter::once(other));
             }
             None => return Ok(expanded),
Expand All @@ -247,6 +257,8 @@ fn expand(input: TokenStream, contains_paste: &mut bool) -> Result<TokenStream>
enum Lookbehind {
JointColon,
DoubleColon,
Pound,
PoundBang,
Other,
}

tests/test_doc.rs (44 additions, 0 deletions)
@@ -0,0 +1,44 @@
use paste::paste;

#[test]
fn test_paste_doc() {
    macro_rules! m {
        ($ret:ident) => {
            paste! {
                #[doc = "Create a new [`" $ret "`] object."]
                fn new() -> $ret { todo!() }
            }
        };
    }

    struct Paste;
    m!(Paste);

    let _ = new;
}

macro_rules! get_doc {
    (#[doc = $literal:tt]) => {
        $literal
    };
}

#[test]
fn test_escaping() {
    let doc = paste! {
        get_doc!(#[doc = "s\"" r#"r#""#])
    };

    let expected = "s\"r#\"";
    assert_eq!(doc, expected);
}

#[test]
fn test_literals() {
    let doc = paste! {
        get_doc!(#[doc = "int=" 0x1 " bool=" true " float=" 0.01])
    };

    let expected = "int=0x1 bool=true float=0.01";
    assert_eq!(doc, expected);
}
