8000 syntax: Remove `Deref` impl from `Token` by petrochenkov · Pull Request #61669 · rust-lang/rust · GitHub
[go: up one dir, main page]

Skip to content

syntax: Remove Deref impl from Token #61669

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits
Jun 9, 2019
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
syntax: Remove Deref impl from Token
  • Loading branch information
petrochenkov committed Jun 8, 2019
commit 25b05147b3ec0a1ed9df9614910a10171b8cf211
12 changes: 6 additions & 6 deletions src/libsyntax/ext/tt/macro_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ use crate::ast::{Ident, Name};
use crate::ext::tt::quoted::{self, TokenTree};
use crate::parse::{Directory, ParseSess};
use crate::parse::parser::{Parser, PathStyle};
use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
use crate::parse::token::{self, DocComment, Nonterminal, Token};
use crate::print::pprust;
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{DelimSpan, TokenStream};
Expand Down Expand Up @@ -417,12 +417,12 @@ fn nameize<I: Iterator<Item = NamedMatch>>(

/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
/// other tokens, this is "unexpected token...".
pub fn parse_failure_msg(tok: TokenKind) -> String {
match tok {
pub fn parse_failure_msg(tok: &Token) -> String {
match tok.kind {
token::Eof => "unexpected end of macro invocation".to_string(),
_ => format!(
"no rules expected the token `{}`",
pprust::token_to_string(&tok)
pprust::token_to_string(tok)
),
}
}
Expand Down Expand Up @@ -804,8 +804,8 @@ pub fn parse(

/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> {
match *token {
fn get_macro_name(token: &Token) -> Option<(Name, bool)> {
match token.kind {
token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)),
_ => None,
}
Expand Down
4 changes: 2 additions & 2 deletions src/libsyntax/ext/tt/macro_rules.rs
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,

let (token, label) = best_failure.expect("ran no matchers");
let span = token.span.substitute_dummy(sp);
let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind));
let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
err.span_label(span, label);
if let Some(sp) = def_span {
if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() {
Expand Down Expand Up @@ -288,7 +288,7 @@ pub fn compile(
let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) {
Success(m) => m,
Failure(token, msg) => {
let s = parse_failure_msg(token.kind);
let s = parse_failure_msg(&token);
let sp = token.span.substitute_dummy(def.span);
let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s);
err.span_label(sp, msg);
Expand Down
20 changes: 5 additions & 15 deletions src/libsyntax/ext/tt/quoted.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,24 +23,14 @@ pub struct Delimited {
}

impl Delimited {
/// Returns the opening delimiter (possibly `NoDelim`).
pub fn open_token(&self) -> TokenKind {
token::OpenDelim(self.delim)
}

/// Returns the closing delimiter (possibly `NoDelim`).
pub fn close_token(&self) -> TokenKind {
token::CloseDelim(self.delim)
}

/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
pub fn open_tt(&self, span: Span) -> TokenTree {
let open_span = if span.is_dummy() {
span
} else {
span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
};
TokenTree::token(self.open_token(), open_span)
TokenTree::token(token::OpenDelim(self.delim), open_span)
}

/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
Expand All @@ -50,7 +40,7 @@ impl Delimited {
} else {
span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
};
TokenTree::token(self.close_token(), close_span)
TokenTree::token(token::CloseDelim(self.delim), close_span)
}
}

Expand Down Expand Up @@ -282,7 +272,7 @@ where
Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
// Must have `(` not `{` or `[`
if delim != token::Paren {
let tok = pprust::token_to_string(&token::OpenDelim(delim));
let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
let msg = format!("expected `(`, found `{}`", tok);
sess.span_diagnostic.span_err(span.entire(), &msg);
}
Expand Down Expand Up @@ -371,8 +361,8 @@ where

/// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return
/// `None`.
fn kleene_op(token: &TokenKind) -> Option<KleeneOp> {
match *token {
fn kleene_op(token: &Token) -> Option<KleeneOp> {
match token.kind {
token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
token::Question => Some(KleeneOp::ZeroOrOne),
Expand Down
2 changes: 1 addition & 1 deletion src/libsyntax/parse/diagnostics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -729,7 +729,7 @@ impl<'a> Parser<'a> {
&mut self,
t: &TokenKind,
) -> PResult<'a, bool /* recovered */> {
let token_str = pprust::token_to_string(t);
let token_str = pprust::token_kind_to_string(t);
let this_token_str = self.this_token_descr();
let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
// Point at the end of the macro call when reaching end of macro arguments.
Expand Down
2 changes: 1 addition & 1 deletion src/libsyntax/parse/lexer/tokentrees.rs
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ impl<'a> TokenTreesReader<'a> {
let raw = self.string_reader.peek_span_src_raw;
self.real_token();
let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
&& token::is_op(&self.token);
&& self.token.is_op();
Ok((tt, if is_joint { Joint } else { NonJoint }))
}
}
Expand Down
4 changes: 2 additions & 2 deletions src/libsyntax/parse/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use crate::parse::parser::emit_unclosed_delims;
use crate::parse::token::TokenKind;
use crate::tokenstream::{TokenStream, TokenTree};
use crate::diagnostics::plugin::ErrorMap;
use crate::print::pprust::token_to_string;
use crate::print::pprust;

use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
use rustc_data_structures::sync::{Lrc, Lock};
Expand Down Expand Up @@ -312,7 +312,7 @@ pub fn maybe_file_to_stream(
for unmatched in unmatched_braces {
let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
"incorrect close delimiter: `{}`",
token_to_string(&token::CloseDelim(unmatched.found_delim)),
pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
));
db.span_label(unmatched.found_span, "incorrect close delimiter");
if let Some(sp) = unmatched.candidate_span {
Expand Down
18 changes: 9 additions & 9 deletions src/libsyntax/parse/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -401,7 +401,7 @@ crate enum TokenType {
impl TokenType {
crate fn to_string(&self) -> String {
match *self {
TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
Expand All @@ -418,7 +418,7 @@ impl TokenType {
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool {
fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
t == &token::ModSep || t == &token::Lt ||
t == &token::BinOp(token::Shl)
}
Expand Down Expand Up @@ -586,10 +586,10 @@ impl<'a> Parser<'a> {
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
if edible.contains(&self.token) {
if edible.contains(&self.token.kind) {
self.bump();
Ok(false)
} else if inedible.contains(&self.token) {
} else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(false)
} else if self.last_unexpected_token_span == Some(self.token.span) {
Expand Down Expand Up @@ -951,7 +951,7 @@ impl<'a> Parser<'a> {
Err(mut e) => {
// Attempt to keep parsing if it was a similar separator
if let Some(ref tokens) = t.similar_tokens() {
if tokens.contains(&self.token) {
if tokens.contains(&self.token.kind) {
self.bump();
}
}
Expand Down Expand Up @@ -1756,7 +1756,7 @@ impl<'a> Parser<'a> {
fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
let ident = self.parse_path_segment_ident()?;

let is_args_start = |token: &TokenKind| match *token {
let is_args_start = |token: &Token| match token.kind {
token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
| token::LArrow => true,
_ => false,
Expand Down Expand Up @@ -2822,7 +2822,7 @@ impl<'a> Parser<'a> {
LhsExpr::AttributesParsed(attrs) => Some(attrs),
_ => None,
};
if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
return self.parse_prefix_range_expr(attrs);
} else {
self.parse_prefix_expr(attrs)?
Expand Down Expand Up @@ -3099,7 +3099,7 @@ impl<'a> Parser<'a> {
self.err_dotdotdot_syntax(self.token.span);
}

debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
"parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
self.token);
let tok = self.token.clone();
Expand Down Expand Up @@ -7867,7 +7867,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler:
for unmatched in unclosed_delims.iter() {
let mut err = handler.struct_span_err(unmatched.found_span, &format!(
"incorrect close delimiter: `{}`",
pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)),
pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
));
err.span_label(unmatched.found_span, "incorrect close delimiter");
if let Some(sp) = unmatched.candidate_span {
Expand Down
29 changes: 11 additions & 18 deletions src/libsyntax/parse/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ use log::info;

use std::fmt;
use std::mem;
use std::ops::Deref;
#[cfg(target_arch = "x86_64")]
use rustc_data_structures::static_assert_size;
use rustc_data_structures::sync::Lrc;
Expand Down Expand Up @@ -553,11 +552,11 @@ impl TokenKind {
impl Token {
// See comments in `Nonterminal::to_tokenstream` for why we care about
// *probably* equal here rather than actual equality
crate fn probably_equal_for_proc_macro(&self, other: &TokenKind) -> bool {
if mem::discriminant(&self.kind) != mem::discriminant(other) {
crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
if mem::discriminant(&self.kind) != mem::discriminant(&other.kind) {
return false
}
match (&self.kind, other) {
match (&self.kind, &other.kind) {
(&Eq, &Eq) |
(&Lt, &Lt) |
(&Le, &Le) |
Expand Down Expand Up @@ -631,14 +630,6 @@ impl PartialEq<TokenKind> for Token {
}
}

// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
impl Deref for Token {
type Target = TokenKind;
fn deref(&self) -> &Self::Target {
&self.kind
}
}

#[derive(Clone, RustcEncodable, RustcDecodable)]
/// For interpolation during macro expansion.
pub enum Nonterminal {
Expand Down Expand Up @@ -778,12 +769,14 @@ impl Nonterminal {
}
}

crate fn is_op(tok: &TokenKind) -> bool {
match *tok {
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
Ident(..) | Lifetime(..) | Interpolated(..) |
Whitespace | Comment | Shebang(..) | Eof => false,
_ => true,
impl Token {
crate fn is_op(&self) -> bool {
match self.kind {
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
Ident(..) | Lifetime(..) | Interpolated(..) |
Whitespace | Comment | Shebang(..) | Eof => false,
_ => true,
}
}
}

Expand Down
12 changes: 8 additions & 4 deletions src/libsyntax/print/pprust.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg};
use crate::util::parser::{self, AssocOp, Fixity};
use crate::attr;
use crate::source_map::{self, SourceMap, Spanned};
use crate::parse::token::{self, BinOpToken, Nonterminal, TokenKind};
use crate::parse::token::{self, BinOpToken, Nonterminal, Token, TokenKind};
use crate::parse::lexer::comments;
use crate::parse::{self, ParseSess};
use crate::print::pp::{self, Breaks};
Expand Down Expand Up @@ -189,7 +189,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
out
}

pub fn token_to_string(tok: &TokenKind) -> String {
pub fn token_kind_to_string(tok: &TokenKind) -> String {
match *tok {
token::Eq => "=".to_string(),
token::Lt => "<".to_string(),
Expand Down Expand Up @@ -250,6 +250,10 @@ pub fn token_to_string(tok: &TokenKind) -> String {
}
}

pub fn token_to_string(token: &Token) -> String {
token_kind_to_string(&token.kind)
}

pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
match *nt {
token::NtExpr(ref e) => expr_to_string(e),
Expand Down Expand Up @@ -734,11 +738,11 @@ pub trait PrintState<'a> {
}
}
TokenTree::Delimited(_, delim, tts) => {
self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
self.writer().word(token_kind_to_string(&token::OpenDelim(delim)))?;
self.writer().space()?;
self.print_tts(tts)?;
self.writer().space()?;
self.writer().word(token_to_string(&token::CloseDelim(delim)))
self.writer().word(token_kind_to_string(&token::CloseDelim(delim)))
},
}
}
Expand Down
8 changes: 0 additions & 8 deletions src/libsyntax/tokenstream.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,14 +126,6 @@ impl TokenTree {
}
}

/// Indicates if the stream is a token that is equal to the provided token.
pub fn eq_token(&self, t: TokenKind) -> bool {
match self {
TokenTree::Token(token) => *token == t,
_ => false,
}
}

pub fn joint(self) -> TokenStream {
TokenStream::new(vec![(self, Joint)])
}
Expand Down
0