Auto merge of #48842 - petrochenkov:under, r=nikomatsakis
syntax: Make `_` a reserved identifier

Why:
- Lexically `_` is an identifier.
- Internally, it makes the implementation of `use Trait as _;` (#48216) and some other things cleaner (a sketch of that usage follows the example below).
- We prevent the externally observable effect (macros expecting `ident` would otherwise start accepting `_`) by treating `_` specially in the `ident` matcher:
```rust
macro_rules! m {
    ($i: ident) => { let $i = 10; }
}

m!(_); // Still an error
```
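
As a minimal sketch of the second point, the kind of underscore import that #48216 is after might look like the snippet below. The surface syntax shown is an assumption for illustration; that feature is not implemented by this commit.
```rust
// Sketch only: underscore imports in the style proposed by #48216;
// this syntax is assumed here, not added by this commit.
use std::fmt::Write as _; // bring the trait's methods into scope without binding a name

fn main() {
    let mut out = String::new();
    write!(out, "{}", 42).unwrap(); // write_fmt comes from the anonymously imported trait
    println!("{}", out); // prints "42"
}
```
In the `parse_rename` change further down, the `_` identifier is gensymed, presumably so that multiple `use ... as _` imports cannot clash with each other or be referenced by name.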
bors committed Mar 17, 2018
2 parents adf2135 + ed5ea5c commit ca6a984
Showing 21 changed files with 153 additions and 136 deletions.
2 changes: 0 additions & 2 deletions src/libproc_macro/lib.rs
@@ -680,7 +680,6 @@ impl TokenTree {
Pound => op!('#'),
Dollar => op!('$'),
Question => op!('?'),
Underscore => op!('_'),

Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),
@@ -743,7 +742,6 @@ impl TokenTree {
'#' => Pound,
'$' => Dollar,
'?' => Question,
'_' => Underscore,
_ => panic!("unsupported character {}", op),
};

2 changes: 1 addition & 1 deletion src/librustc/hir/mod.rs
@@ -214,7 +214,7 @@ impl LifetimeName {
use self::LifetimeName::*;
match *self {
Implicit => keywords::Invalid.name(),
Underscore => Symbol::intern("'_"),
Underscore => keywords::UnderscoreLifetime.name(),
Static => keywords::StaticLifetime.name(),
Name(name) => name,
}
1 change: 0 additions & 1 deletion src/librustc/ich/impls_syntax.rs
@@ -287,7 +287,6 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
token::Token::Pound |
token::Token::Dollar |
token::Token::Question |
token::Token::Underscore |
token::Token::Whitespace |
token::Token::Comment |
token::Token::Eof => {}
6 changes: 4 additions & 2 deletions src/librustc_passes/ast_validation.rs
@@ -37,15 +37,17 @@ impl<'a> AstValidator<'a> {
}

fn check_lifetime(&self, lifetime: &Lifetime) {
let valid_names = [keywords::StaticLifetime.name(), keywords::Invalid.name()];
let valid_names = [keywords::UnderscoreLifetime.name(),
keywords::StaticLifetime.name(),
keywords::Invalid.name()];
if !valid_names.contains(&lifetime.ident.name) &&
token::Ident(lifetime.ident.without_first_quote()).is_reserved_ident() {
self.err_handler().span_err(lifetime.span, "lifetimes cannot use keyword names");
}
}

fn check_label(&self, label: Ident, span: Span) {
if token::Ident(label.without_first_quote()).is_reserved_ident() || label.name == "'_" {
if token::Ident(label.without_first_quote()).is_reserved_ident() {
self.err_handler().span_err(span, &format!("invalid label name `{}`", label.name));
}
}
2 changes: 1 addition & 1 deletion src/librustdoc/html/highlight.rs
@@ -352,7 +352,7 @@ impl<'a> Classifier<'a> {

token::Lifetime(..) => Class::Lifetime,

token::Underscore | token::Eof | token::Interpolated(..) |
token::Eof | token::Interpolated(..) |
token::Tilde | token::At | token::DotEq => Class::None,
};

4 changes: 2 additions & 2 deletions src/libsyntax/diagnostics/plugin.rs
@@ -19,7 +19,7 @@ use ext::base::{ExtCtxt, MacEager, MacResult};
use ext::build::AstBuilder;
use parse::token;
use ptr::P;
use symbol::Symbol;
use symbol::{keywords, Symbol};
use tokenstream::{TokenTree};
use util::small_vector::SmallVector;

@@ -192,7 +192,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
(descriptions.len(), ecx.expr_vec(span, descriptions))
});

let static_ = ecx.lifetime(span, Ident::from_str("'static"));
let static_ = ecx.lifetime(span, keywords::StaticLifetime.ident());
let ty_str = ecx.ty_rptr(
span,
ecx.ty_ident(span, ecx.ident_of("str")),
1 change: 0 additions & 1 deletion src/libsyntax/ext/quote.rs
@@ -709,7 +709,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::Pound => "Pound",
token::Dollar => "Dollar",
token::Question => "Question",
token::Underscore => "Underscore",
token::Eof => "Eof",

token::Whitespace | token::Comment | token::Shebang(_) => {
39 changes: 20 additions & 19 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -86,7 +86,7 @@ use self::TokenTreeOrTokenTreeVec::*;

use ast::Ident;
use syntax_pos::{self, BytePos, Span};
use codemap::Spanned;
use codemap::respan;
use errors::FatalError;
use ext::tt::quoted::{self, TokenTree};
use parse::{Directory, ParseSess};
@@ -709,6 +709,15 @@ pub fn parse(
}
}

/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<Ident> {
match *token {
token::Ident(ident) if ident.name != keywords::Underscore.name() => Some(ident),
_ => None,
}
}

/// Checks whether a non-terminal may begin with a particular token.
///
/// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that
@@ -725,7 +734,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
match name {
"expr" => token.can_begin_expr(),
"ty" => token.can_begin_type(),
"ident" => token.is_ident(),
"ident" => get_macro_ident(token).is_some(),
"vis" => match *token {
// The follow-set of :vis + "priv" keyword + interpolated
Token::Comma | Token::Ident(_) | Token::Interpolated(_) => true,
@@ -765,8 +774,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
Token::DotDotDot | // range pattern (future compat)
Token::ModSep | // path
Token::Lt | // path (UFCS constant)
Token::BinOp(token::Shl) | // path (double UFCS)
Token::Underscore => true, // placeholder
Token::BinOp(token::Shl) => true, // path (double UFCS)
Token::Interpolated(ref nt) => may_be_ident(&nt.0),
_ => false,
},
@@ -815,21 +823,14 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
"expr" => token::NtExpr(panictry!(p.parse_expr())),
"ty" => token::NtTy(panictry!(p.parse_ty())),
// this could be handled like a token, since it is one
"ident" => match p.token {
token::Ident(sn) => {
p.bump();
token::NtIdent(Spanned::<Ident> {
node: sn,
span: p.prev_span,
})
}
_ => {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}", &token_str[..]))
.emit();
FatalError.raise()
}
},
"ident" => if let Some(ident) = get_macro_ident(&p.token) {
p.bump();
token::NtIdent(respan(p.prev_span, ident))
} else {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}", &token_str)).emit();
FatalError.raise()
}
"path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))),
"meta" => token::NtMeta(panictry!(p.parse_meta_item())),
"vis" => token::NtVis(panictry!(p.parse_visibility(true))),
2 changes: 1 addition & 1 deletion src/libsyntax/feature_gate.rs
@@ -1790,7 +1790,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}

fn visit_lifetime(&mut self, lt: &'a ast::Lifetime) {
if lt.ident.name == "'_" {
if lt.ident.name == keywords::UnderscoreLifetime.name() {
gate_feature_post!(&self, underscore_lifetimes, lt.span,
"underscore lifetimes are unstable");
}
14 changes: 4 additions & 10 deletions src/libsyntax/parse/lexer/mod.rs
@@ -34,7 +34,7 @@ pub struct TokenAndSpan {

impl Default for TokenAndSpan {
fn default() -> Self {
TokenAndSpan { tok: token::Underscore, sp: syntax_pos::DUMMY_SP }
TokenAndSpan { tok: token::Whitespace, sp: syntax_pos::DUMMY_SP }
}
}

@@ -126,7 +126,7 @@ impl<'a> StringReader<'a> {
pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
assert!(self.fatal_errs.is_empty());
let ret_val = TokenAndSpan {
tok: replace(&mut self.peek_tok, token::Underscore),
tok: replace(&mut self.peek_tok, token::Whitespace),
sp: self.peek_span,
};
self.advance_token()?;
@@ -1133,14 +1133,8 @@ impl<'a> StringReader<'a> {
self.bump();
}

return Ok(self.with_str_from(start, |string| {
if string == "_" {
token::Underscore
} else {
// FIXME: perform NFKC normalization here. (Issue #2253)
token::Ident(self.mk_ident(string))
}
}));
// FIXME: perform NFKC normalization here. (Issue #2253)
return Ok(self.with_str_from(start, |string| token::Ident(self.mk_ident(string))));
}

if is_dec_digit(c) {
36 changes: 16 additions & 20 deletions src/libsyntax/parse/parser.rs
@@ -549,7 +549,7 @@ impl<'a> Parser<'a> {
-> Self {
let mut parser = Parser {
sess,
token: token::Underscore,
token: token::Whitespace,
span: syntax_pos::DUMMY_SP,
prev_span: syntax_pos::DUMMY_SP,
meta_var_span: None,
@@ -800,11 +800,7 @@ impl<'a> Parser<'a> {
Err(if self.prev_token_kind == PrevTokenKind::DocComment {
self.span_fatal_err(self.prev_span, Error::UselessDocComment)
} else {
let mut err = self.expected_ident_found();
if self.token == token::Underscore {
err.note("`_` is a wildcard pattern, not an identifier");
}
err
self.expected_ident_found()
})
}
}
@@ -1602,7 +1598,7 @@ impl<'a> Parser<'a> {
let e = self.parse_expr()?;
self.expect(&token::CloseDelim(token::Paren))?;
TyKind::Typeof(e)
} else if self.eat(&token::Underscore) {
} else if self.eat_keyword(keywords::Underscore) {
// A type to be inferred `_`
TyKind::Infer
} else if self.token_is_bare_fn_keyword() {
@@ -1796,7 +1792,7 @@ impl<'a> Parser<'a> {
_ => 0,
};

self.look_ahead(offset, |t| t.is_ident() || t == &token::Underscore) &&
self.look_ahead(offset, |t| t.is_ident()) &&
self.look_ahead(offset + 1, |t| t == &token::Colon)
}

@@ -2782,7 +2778,7 @@ impl<'a> Parser<'a> {
},
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {
let (token, span) = (mem::replace(&mut self.token, token::Underscore), self.span);
let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
self.bump();
TokenTree::Token(span, token)
}
@@ -3815,11 +3811,6 @@ impl<'a> Parser<'a> {
let lo = self.span;
let pat;
match self.token {
token::Underscore => {
// Parse _
self.bump();
pat = PatKind::Wild;
}
token::BinOp(token::And) | token::AndAnd => {
// Parse &pat / &mut pat
self.expect_and()?;
@@ -3849,8 +3840,11 @@ impl<'a> Parser<'a> {
self.expect(&token::CloseDelim(token::Bracket))?;
pat = PatKind::Slice(before, slice, after);
}
// At this point, token != _, &, &&, (, [
_ => if self.eat_keyword(keywords::Mut) {
// At this point, token != &, &&, (, [
_ => if self.eat_keyword(keywords::Underscore) {
// Parse _
pat = PatKind::Wild;
} else if self.eat_keyword(keywords::Mut) {
// Parse mut ident @ pat / mut ref ident @ pat
let mutref_span = self.prev_span.to(self.span);
let binding_mode = if self.eat_keyword(keywords::Ref) {
@@ -7065,10 +7059,12 @@ impl<'a> Parser<'a> {

fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
if self.eat_keyword(keywords::As) {
if self.eat(&token::Underscore) {
Ok(Some(Ident::with_empty_ctxt(Symbol::gensym("_"))))
} else {
self.parse_ident().map(Some)
match self.token {
token::Ident(ident) if ident.name == keywords::Underscore.name() => {
self.bump(); // `_`
Ok(Some(Ident { name: ident.name.gensymed(), ..ident }))
}
_ => self.parse_ident().map(Some),
}
} else {
Ok(None)
9 changes: 4 additions & 5 deletions src/libsyntax/parse/token.rs
@@ -122,6 +122,7 @@ fn ident_can_begin_type(ident: ast::Ident) -> bool {
!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
[
keywords::Underscore.name(),
keywords::For.name(),
keywords::Impl.name(),
keywords::Fn.name(),
@@ -175,7 +176,6 @@ pub enum Token {

/* Name components */
Ident(ast::Ident),
Underscore,
Lifetime(ast::Ident),

// The `LazyTokenStream` is a pure function of the `Nonterminal`,
@@ -242,7 +242,6 @@ impl Token {
Ident(ident) => ident_can_begin_type(ident), // type name or keyword
OpenDelim(Paren) | // tuple
OpenDelim(Bracket) | // array
Underscore | // placeholder
Not | // never
BinOp(Star) | // raw pointer
BinOp(And) | // reference
@@ -371,7 +370,7 @@ impl Token {
// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special_ident(&self) -> bool {
match self.ident() {
Some(id) => id.name <= keywords::DollarCrate.name(),
Some(id) => id.name <= keywords::Underscore.name(),
_ => false,
}
}
@@ -441,7 +440,7 @@ impl Token {

Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | DotEq |
DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
Question | OpenDelim(..) | CloseDelim(..) | Underscore => return None,
Question | OpenDelim(..) | CloseDelim(..) => return None,

Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
Whitespace | Comment | Shebang(..) | Eof => return None,
@@ -573,7 +572,7 @@ impl fmt::Debug for Nonterminal {
pub fn is_op(tok: &Token) -> bool {
match *tok {
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
Ident(..) | Underscore | Lifetime(..) | Interpolated(..) |
Ident(..) | Lifetime(..) | Interpolated(..) |
Whitespace | Comment | Shebang(..) | Eof => false,
_ => true,
}
1 change: 0 additions & 1 deletion src/libsyntax/print/pprust.rs
@@ -252,7 +252,6 @@ pub fn token_to_string(tok: &Token) -> String {
/* Name components */
token::Ident(s) => s.to_string(),
token::Lifetime(s) => s.to_string(),
token::Underscore => "_".to_string(),

/* Other */
token::DocComment(s) => s.to_string(),
6 changes: 3 additions & 3 deletions src/libsyntax_ext/env.rs
@@ -17,7 +17,7 @@ use syntax::ast::{self, Ident};
use syntax::ext::base::*;
use syntax::ext::base;
use syntax::ext::build::AstBuilder;
use syntax::symbol::Symbol;
use syntax::symbol::{keywords, Symbol};
use syntax_pos::Span;
use syntax::tokenstream;

@@ -35,14 +35,14 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
let sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
let e = match env::var(&*var.as_str()) {
Err(..) => {
let lt = cx.lifetime(sp, keywords::StaticLifetime.ident());
cx.expr_path(cx.path_all(sp,
true,
cx.std_path(&["option", "Option", "None"]),
Vec::new(),
vec![cx.ty_rptr(sp,
cx.ty_ident(sp, Ident::from_str("str")),
Some(cx.lifetime(sp,
Ident::from_str("'static"))),
Some(lt),
ast::Mutability::Immutable)],
Vec::new()))
}