Implementation of RFC 2151, Raw Identifiers #48942

Merged: 7 commits, Mar 23, 2018
Changes from all commits
11 changes: 9 additions & 2 deletions src/libproc_macro/lib.rs
@@ -681,7 +681,8 @@ impl TokenTree {
Dollar => op!('$'),
Question => op!('?'),

Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Ident(ident, false) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Ident(ident, true) => TokenNode::Term(Term(Symbol::intern(&format!("r#{}", ident)))),
Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),

Interpolated(_) => {
@@ -713,8 +714,14 @@ impl TokenTree {
},
TokenNode::Term(symbol) => {
let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt() };
let sym_str = symbol.0.as_str();
let token =
if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) };
if sym_str.starts_with("'") { Lifetime(ident) }
else if sym_str.starts_with("r#") {
let name = Symbol::intern(&sym_str[2..]);
let ident = ast::Ident { name, ctxt: self.span.0.ctxt() };
Ident(ident, true)
} else { Ident(ident, false) };
return TokenTree::Token(self.span.0, token).into();
}
TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(),
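
For orientation, here is a standalone sketch (plain Rust, not rustc internals) of the encoding this hunk implements: a raw identifier token is surfaced to procedural macros as a Term spelled r#name, and a Term whose name starts with r# is decoded back into a raw Ident token, while lifetimes are still distinguished by their leading quote.

```rust
fn term_name(ident: &str, is_raw: bool) -> String {
    if is_raw { format!("r#{}", ident) } else { ident.to_string() }
}

fn term_to_ident(name: &str) -> (String, bool) {
    if name.starts_with("r#") {
        (name[2..].to_string(), true)   // raw identifier, e.g. r#match
    } else {
        (name.to_string(), false)       // plain identifier or lifetime
    }
}

fn main() {
    assert_eq!(term_name("match", true), "r#match");
    assert_eq!(term_to_ident("r#match"), ("match".to_string(), true));
    assert_eq!(term_to_ident("foo"), ("foo".to_string(), false));
}
```
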
5 changes: 4 additions & 1 deletion src/librustc/ich/impls_syntax.rs
@@ -318,7 +318,10 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
opt_name.hash_stable(hcx, hasher);
}

token::Token::Ident(ident) |
token::Token::Ident(ident, is_raw) => {
ident.name.hash_stable(hcx, hasher);
is_raw.hash_stable(hcx, hasher);
}
token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),

token::Token::Interpolated(_) => {
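
The hashing change simply folds the new is_raw flag into the token's stable hash, so a plain identifier and its raw spelling no longer hash identically, which matters for incremental-compilation fingerprints. A toy analogue using std's hasher rather than rustc's StableHasher:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy fingerprint: hash the name and the raw-ness flag, as the hunk above does.
fn token_fingerprint(name: &str, is_raw: bool) -> u64 {
    let mut h = DefaultHasher::new();
    name.hash(&mut h);
    is_raw.hash(&mut h);
    h.finish()
}

fn main() {
    // `foo` and `r#foo` now produce different fingerprints.
    assert_ne!(token_fingerprint("foo", false), token_fingerprint("foo", true));
}
```
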
4 changes: 2 additions & 2 deletions src/librustc_passes/ast_validation.rs
@@ -41,13 +41,13 @@ impl<'a> AstValidator<'a> {
keywords::StaticLifetime.name(),
keywords::Invalid.name()];
if !valid_names.contains(&lifetime.ident.name) &&
token::Ident(lifetime.ident.without_first_quote()).is_reserved_ident() {
token::is_reserved_ident(lifetime.ident.without_first_quote()) {
self.err_handler().span_err(lifetime.span, "lifetimes cannot use keyword names");
}
}

fn check_label(&self, label: Ident, span: Span) {
if token::Ident(label.without_first_quote()).is_reserved_ident() {
if token::is_reserved_ident(label.without_first_quote()) {
self.err_handler().span_err(span, &format!("invalid label name `{}`", label.name));
}
}
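
Presumably because Ident now carries a raw-ness flag, these checks call the free helper token::is_reserved_ident instead of constructing a token just to query it; the behavior enforced is unchanged. Illustrative source that these checks reject (compile-fail by design, error wording taken from the diff):

```rust
// Both declarations below are rejected by the validator above:
fn f<'fn>() {}        // error: lifetimes cannot use keyword names

fn g() {
    'match: loop {    // error: invalid label name `'match`
        break 'match;
    }
}
```
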
2 changes: 1 addition & 1 deletion src/librustc_resolve/lib.rs
@@ -3206,7 +3206,7 @@ impl<'a> Resolver<'a> {
// `$crate::a::b`
module = Some(self.resolve_crate_root(ident.node.ctxt, true));
continue
} else if i == 1 && !token::Ident(ident.node).is_path_segment_keyword() {
} else if i == 1 && !token::is_path_segment_keyword(ident.node) {
let prev_name = path[0].node.name;
if prev_name == keywords::Extern.name() ||
prev_name == keywords::CrateRoot.name() &&
2 changes: 1 addition & 1 deletion src/librustc_resolve/macros.rs
@@ -268,7 +268,7 @@ impl<'a> base::Resolver for Resolver<'a> {
if k > 0 {
tokens.push(TokenTree::Token(path.span, Token::ModSep).into());
}
let tok = Token::Ident(segment.identifier);
let tok = Token::from_ast_ident(segment.identifier);
Contributor Author: I changed this in from_ast_ident, since it seems this is the best behavior in all these cases.

tokens.push(TokenTree::Token(path.span, tok).into());
}
}
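
Token::from_ast_ident is not defined in the hunks shown here. Judging from its call sites in this PR, it decides the is_raw flag from the identifier's name alone; a self-contained analogue (assumed behavior, not rustc code) might look like this:

```rust
#[derive(Debug, PartialEq)]
enum Token {
    Ident(String, /* is_raw */ bool),
}

// Tiny stand-ins for token::is_reserved_ident / token::is_path_segment_keyword.
fn is_reserved(name: &str) -> bool {
    matches!(name, "fn" | "match" | "if" | "crate" | "self" | "super" | "Self")
}
fn is_path_segment_keyword(name: &str) -> bool {
    matches!(name, "crate" | "self" | "super" | "Self")
}

// Mark the token raw when the name is a keyword that could not otherwise
// round-trip through the parser as a plain identifier.
fn from_ast_ident(name: &str) -> Token {
    Token::Ident(name.to_string(), is_reserved(name) && !is_path_segment_keyword(name))
}

fn main() {
    assert_eq!(from_ast_ident("match"), Token::Ident("match".into(), true));
    assert_eq!(from_ast_ident("foo"), Token::Ident("foo".into(), false));
    assert_eq!(from_ast_ident("self"), Token::Ident("self".into(), false));
}
```
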
4 changes: 2 additions & 2 deletions src/librustc_resolve/resolve_imports.rs
@@ -625,7 +625,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
} else {
Some(self.resolve_crate_root(source.ctxt.modern(), false))
}
} else if is_extern && !token::Ident(source).is_path_segment_keyword() {
} else if is_extern && !token::is_path_segment_keyword(source) {
let crate_id =
self.crate_loader.resolve_crate_from_path(source.name, directive.span);
let crate_root =
@@ -667,7 +667,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
}
PathResult::Failed(span, msg, true) => {
let (mut self_path, mut self_result) = (module_path.clone(), None);
let is_special = |ident| token::Ident(ident).is_path_segment_keyword() &&
let is_special = |ident| token::is_path_segment_keyword(ident) &&
ident.name != keywords::CrateRoot.name();
if !self_path.is_empty() && !is_special(self_path[0].node) &&
!(self_path.len() > 1 && is_special(self_path[1].node)) {
8 changes: 4 additions & 4 deletions src/librustdoc/html/highlight.rs
@@ -323,12 +323,12 @@ impl<'a> Classifier<'a> {
}

// Keywords are also included in the identifier set.
token::Ident(ident) => {
token::Ident(ident, is_raw) => {
match &*ident.name.as_str() {
"ref" | "mut" => Class::RefKeyWord,
"ref" | "mut" if !is_raw => Class::RefKeyWord,

"self" |"Self" => Class::Self_,
"false" | "true" => Class::Bool,
"self" | "Self" => Class::Self_,
"false" | "true" if !is_raw => Class::Bool,

"Option" | "Result" => Class::PreludeTy,
"Some" | "None" | "Ok" | "Err" => Class::PreludeVal,
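
The effect in rustdoc's highlighter is that only the non-raw spellings keep their special classes; a raw identifier that happens to spell a keyword is classified like any other identifier. Illustrative source (at the time of this PR raw identifiers were behind #![feature(raw_identifiers)]):

```rust
fn demo(pair: &(i32, i32)) -> i32 {
    let &(ref a, _) = pair;   // `ref` here is the keyword (Class::RefKeyWord)
    let r#ref = *a + 1;       // `r#ref` is an ordinary identifier
    r#ref
}

fn main() {
    assert_eq!(demo(&(41, 0)), 42);
}
```
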
2 changes: 1 addition & 1 deletion src/libsyntax/ast.rs
@@ -112,7 +112,7 @@ impl Path {
// or starts with something like `self`/`super`/`$crate`/etc.
pub fn make_root(&self) -> Option<PathSegment> {
if let Some(ident) = self.segments.get(0).map(|seg| seg.identifier) {
if ::parse::token::Ident(ident).is_path_segment_keyword() &&
if ::parse::token::is_path_segment_keyword(ident) &&
ident.name != keywords::Crate.name() {
return None;
}
13 changes: 7 additions & 6 deletions src/libsyntax/attr.rs
@@ -1106,17 +1106,18 @@ impl IntType {

impl MetaItem {
fn tokens(&self) -> TokenStream {
let ident = TokenTree::Token(self.span, Token::Ident(Ident::with_empty_ctxt(self.name)));
let ident = TokenTree::Token(self.span,
Token::from_ast_ident(Ident::with_empty_ctxt(self.name)));
TokenStream::concat(vec![ident.into(), self.node.tokens(self.span)])
}

fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
where I: Iterator<Item = TokenTree>,
{
let (span, name) = match tokens.next() {
Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
Some(TokenTree::Token(span, Token::Ident(ident, _))) => (span, ident.name),
Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
token::Nonterminal::NtIdent(ident, _) => (ident.span, ident.node.name),
token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
_ => return None,
},
@@ -1269,14 +1270,14 @@ impl LitKind {
"true"
} else {
"false"
}))),
})), false),
}
}

fn from_token(token: Token) -> Option<LitKind> {
match token {
Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)),
Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)),
Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
Token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref v) => match v.node {
ExprKind::Lit(ref lit) => Some(lit.node.clone()),
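
The Ident(.., false) guards mean only the non-raw spellings of true and false are read back as boolean literals; the raw spellings behave like ordinary identifiers. Illustrative only:

```rust
fn demo() -> bool {
    let r#true = false;   // `r#true` is just a binding name, not a literal
    let real = true;      // `true` is still the boolean literal
    real && !r#true
}

fn main() {
    assert!(demo());
}
```
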
10 changes: 5 additions & 5 deletions src/libsyntax/diagnostics/plugin.rs
@@ -44,7 +44,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match (token_tree.len(), token_tree.get(0)) {
(1, Some(&TokenTree::Token(_, token::Ident(code)))) => code,
(1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
_ => unreachable!()
};

@@ -82,10 +82,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
token_tree.get(1),
token_tree.get(2)
) {
(1, Some(&TokenTree::Token(_, token::Ident(ref code))), None, None) => {
(1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
(code, None)
},
(3, Some(&TokenTree::Token(_, token::Ident(ref code))),
(3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
Some(&TokenTree::Token(_, token::Comma)),
Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
(code, Some(description))
@@ -150,9 +150,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
(
// Crate name.
&TokenTree::Token(_, token::Ident(ref crate_name)),
&TokenTree::Token(_, token::Ident(ref crate_name, _)),
// DIAGNOSTICS ident.
&TokenTree::Token(_, token::Ident(ref name))
&TokenTree::Token(_, token::Ident(ref name, _))
) => (*&crate_name, name),
_ => unreachable!()
};
5 changes: 3 additions & 2 deletions src/libsyntax/ext/base.rs
@@ -229,8 +229,9 @@ impl<F> TTMacroExpander for F
impl Folder for AvoidInterpolatedIdents {
fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
if let token::NtIdent(ident) = nt.0 {
return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node));
if let token::NtIdent(ident, is_raw) = nt.0 {
return tokenstream::TokenTree::Token(ident.span,
token::Ident(ident.node, is_raw));
}
}
fold::noop_fold_tt(tt, self)
12 changes: 7 additions & 5 deletions src/libsyntax/ext/quote.rs
@@ -75,7 +75,7 @@ pub mod rt {

impl ToTokens for ast::Ident {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
vec![TokenTree::Token(DUMMY_SP, Token::from_ast_ident(*self))]
}
}

@@ -238,7 +238,9 @@ pub mod rt {
if i > 0 {
inner.push(TokenTree::Token(self.span, token::Colon).into());
}
inner.push(TokenTree::Token(self.span, token::Ident(segment.identifier)).into());
inner.push(TokenTree::Token(
self.span, token::Token::from_ast_ident(segment.identifier)
).into());
}
inner.push(self.tokens.clone());

@@ -658,10 +660,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::Literal(token::ByteStr(i), suf) => return mk_lit!("ByteStr", suf, i),
token::Literal(token::ByteStrRaw(i, n), suf) => return mk_lit!("ByteStrRaw", suf, i, n),

token::Ident(ident) => {
token::Ident(ident, is_raw) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "Ident"),
vec![mk_ident(cx, sp, ident)]);
vec![mk_ident(cx, sp, ident), cx.expr_bool(sp, is_raw)]);
}

token::Lifetime(ident) => {
@@ -720,7 +722,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {

fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
match *tt {
TokenTree::Token(sp, token::Ident(ident)) if quoted => {
TokenTree::Token(sp, token::Ident(ident, _)) if quoted => {
// tt.extend($ident.to_tokens(ext_cx))

let e_to_toks =
21 changes: 11 additions & 10 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -364,8 +364,8 @@ pub fn parse_failure_msg(tok: Token) -> String {

/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) {
id1.name == id2.name
if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
id1.name == id2.name && is_raw1 == is_raw2
} else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
id1.name == id2.name
} else {
@@ -711,9 +711,10 @@ pub fn parse(

/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<Ident> {
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
match *token {
token::Ident(ident) if ident.name != keywords::Underscore.name() => Some(ident),
token::Ident(ident, is_raw) if ident.name != keywords::Underscore.name() =>
Some((ident, is_raw)),
_ => None,
}
}
@@ -737,7 +738,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
"ident" => get_macro_ident(token).is_some(),
"vis" => match *token {
// The follow-set of :vis + "priv" keyword + interpolated
Token::Comma | Token::Ident(_) | Token::Interpolated(_) => true,
Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
_ => token.can_begin_type(),
},
"block" => match *token {
@@ -746,7 +747,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
token::NtItem(_)
| token::NtPat(_)
| token::NtTy(_)
| token::NtIdent(_)
| token::NtIdent(..)
| token::NtMeta(_)
| token::NtPath(_)
| token::NtVis(_) => false, // none of these may start with '{'.
@@ -755,15 +756,15 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
_ => false,
},
"path" | "meta" => match *token {
Token::ModSep | Token::Ident(_) => true,
Token::ModSep | Token::Ident(..) => true,
Token::Interpolated(ref nt) => match nt.0 {
token::NtPath(_) | token::NtMeta(_) => true,
_ => may_be_ident(&nt.0),
},
_ => false,
},
"pat" => match *token {
Token::Ident(_) | // box, ref, mut, and other identifiers (can stricten)
Token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
Token::OpenDelim(token::Paren) | // tuple pattern
Token::OpenDelim(token::Bracket) | // slice pattern
Token::BinOp(token::And) | // reference
@@ -823,9 +824,9 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
"expr" => token::NtExpr(panictry!(p.parse_expr())),
"ty" => token::NtTy(panictry!(p.parse_ty())),
// this could be handled like a token, since it is one
"ident" => if let Some(ident) = get_macro_ident(&p.token) {
"ident" => if let Some((ident, is_raw)) = get_macro_ident(&p.token) {
p.bump();
token::NtIdent(respan(p.prev_span, ident))
token::NtIdent(respan(p.prev_span, ident), is_raw)
} else {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}", &token_str)).emit();
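
On the macro_rules side, the ident fragment now carries the raw flag through matching and expansion, and token_name_eq only equates identifiers with the same raw-ness, so a literal fn token in a matcher will not match r#fn. A small sketch of the user-visible effect (feature-gated at the time of this PR):

```rust
macro_rules! make_fn {
    ($name:ident) => {
        fn $name() -> u32 { 7 }
    };
}

make_fn!(regular);
make_fn!(r#match);   // a raw identifier binds to `$name:ident` too

fn main() {
    assert_eq!(regular(), 7);
    assert_eq!(r#match(), 7);
}
```
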
6 changes: 3 additions & 3 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -831,7 +831,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
"pat" => match *tok {
TokenTree::Token(_, ref tok) => match *tok {
FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
Ident(i) if i.name == "if" || i.name == "in" => Ok(true),
Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true),
_ => Ok(false)
},
_ => Ok(false),
@@ -840,7 +840,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
TokenTree::Token(_, ref tok) => match *tok {
OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
Ident(i) if i.name == "as" || i.name == "where" => Ok(true),
Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true),
_ => Ok(false)
},
TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
@@ -860,7 +860,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
match *tok {
TokenTree::Token(_, ref tok) => match *tok {
Comma => Ok(true),
Ident(i) if i.name != "priv" => Ok(true),
Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true),
ref tok => Ok(tok.can_begin_type())
},
TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident"
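
In the follow-set rules, raw identifiers no longer count as the keywords they spell: only a plain if/in may follow a pat fragment, and any raw identifier may follow a vis fragment (a raw identifier can never be part of a visibility), while a plain priv is still rejected. A contrived but, assuming a current compiler, compilable sketch of the vis case (both vis matchers and raw identifiers were feature-gated when this PR landed):

```rust
macro_rules! m {
    // A literal non-raw `priv` after `$v:vis` would be rejected by the
    // follow-set check; the raw spelling is accepted.
    ($v:vis r#priv $name:ident) => {
        $v struct $name;
    };
}

m!(pub r#priv Unit);

fn main() {
    let _unit = Unit;
}
```
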
6 changes: 3 additions & 3 deletions src/libsyntax/ext/tt/quoted.rs
@@ -200,7 +200,7 @@ pub fn parse(
let span = match trees.next() {
Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
Some(kind) => {
Some((kind, _)) => {
let span = end_sp.with_lo(start_sp.lo());
result.push(TokenTree::MetaVarDecl(span, ident, kind));
continue;
@@ -289,14 +289,14 @@
// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
// metavariable that names the crate of the invokation.
Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
let ident = token.ident().unwrap();
let (ident, _) = token.ident().unwrap();
let span = ident_span.with_lo(span.lo());
if ident.name == keywords::Crate.name() {
Contributor: Hmm, I think this will accept $r#crate in the crate root meaning while we should treat it as a usual metavar named "crate".

Contributor: Could you add a test case for this as well?

Contributor Author: Currently r#crate is forbidden along with the rest of the keywords matching is_path_segment_keyword.

let ident = ast::Ident {
name: keywords::DollarCrate.name(),
..ident
};
TokenTree::Token(span, token::Ident(ident))
TokenTree::Token(span, token::Ident(ident, false))
} else {
TokenTree::MetaVar(span, ident)
}
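
Regarding the review thread above: $crate keeps its special crate-root meaning when macro definitions are parsed, and per the author the raw spelling r#crate is rejected outright, like the other path-segment keywords, so it never reaches the metavariable logic. A small sketch (error wording paraphrased, errors left commented out so the example compiles):

```rust
macro_rules! uses_dollar_crate {
    () => {
        $crate::helper()
    };
}

pub fn helper() -> u32 { 1 }

fn main() {
    assert_eq!(uses_dollar_crate!(), 1);
    // Raw spellings of path-segment keywords are not valid identifiers:
    // let r#crate = 0;   // error: `crate` cannot be a raw identifier
    // let r#self = 0;    // error: `self` cannot be a raw identifier
}
```
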
2 changes: 1 addition & 1 deletion src/libsyntax/ext/tt/transcribe.rs
@@ -169,7 +169,7 @@ pub fn transcribe(cx: &ExtCtxt,
Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
result.push(TokenTree::Token(sp, token::Dollar).into());
result.push(TokenTree::Token(sp, token::Ident(ident)).into());
result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());
}
}
quoted::TokenTree::Delimited(mut span, delimited) => {