Rollup of 6 pull requests by Centril · Pull Request #69271 · rust-lang/rust · GitHub
[go: up one dir, main page]

Skip to content

Rollup of 6 pull requests #69271

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 31 commits into from
Feb 19, 2020
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
0663f25
Always qualify literals by type
matthewjasper Feb 3, 2020
f2980e7
Add fast path for is_freeze
matthewjasper Feb 15, 2020
2fd1544
ast: move Generics into AssocItemKinds
Centril Feb 13, 2020
f06df16
ast: colocate AssocItem with ForeignItem
Centril Feb 13, 2020
e2ae717
ast: tweak comments of Foreign/AssocItemKind
Centril Feb 13, 2020
95dc9b9
ast: normalize `ForeignItemKind::Ty` & `AssocItemKind::TyAlias`.
Centril Feb 14, 2020
f3e9763
ast: make `= <expr>;` optional in free statics/consts.
Centril Feb 14, 2020
1c2906e
ast/parser: fuse `static` & `const` grammars in all contexts.
Centril Feb 14, 2020
f8d2264
parse associated statics.
Centril Feb 15, 2020
35884fe
parse extern consts
Centril Feb 15, 2020
91110fd
ast: make ForeignItemKind an alias of AssocItemKind
Centril Feb 15, 2020
0e0c028
fuse extern & associated item parsing up to defaultness
Centril Feb 15, 2020
cf87edf
pprust: unify extern & associated item printing
Centril Feb 15, 2020
5abedd8
visit: unify extern & assoc item visiting
Centril Feb 15, 2020
d6238bd
reject assoc statics & extern consts during parsing
Centril Feb 15, 2020
fe62bed
print_item_const: remove extraneous space
Centril Feb 15, 2020
f12ae4a
ast: tweak AssocItemKind::Macro comment
Centril Feb 15, 2020
8bafe88
Select an appropriate unused lifetime name in suggestion
estebank Feb 14, 2020
045b7d5
ast: add a FIXME
Centril Feb 17, 2020
2e07892
Do not emit note suggesting to implement trait to foreign type
LeSeulArtichaut Feb 16, 2020
0b1e08c
parse: recover `mut (x @ y)` as `(mut x @ mut y)`.
Centril Feb 17, 2020
d33b356
parser: Do not call `bump` recursively
petrochenkov Feb 16, 2020
ed2fd28
parser: Set previous and unnormalized tokens in couple more places
petrochenkov Feb 16, 2020
06fbb0b
parser: Remove `Option`s from unnormalized tokens
petrochenkov Feb 16, 2020
950845c
Add a test for proc macro generating `$ IDENT`
petrochenkov Feb 17, 2020
5e2a095
Rollup merge of #69146 - matthewjasper:literal-qualif, r=eddyb
Centril Feb 18, 2020
981acd9
Rollup merge of #69159 - estebank:use-appropriate-lt-name, r=ecstatic…
Centril Feb 18, 2020
b864d23
Rollup merge of #69194 - Centril:assoc-extern-fuse, r=petrochenkov
Centril Feb 18, 2020
1cf0194
Rollup merge of #69211 - petrochenkov:prevtok, r=Centril
Centril Feb 18, 2020
c499570
Rollup merge of #69217 - LeSeulArtichaut:remove-lint-impl-op, r=estebank
Centril Feb 18, 2020
6c6d45c
Rollup merge of #69236 - Centril:mut-parens-at-recovery, r=estebank
Centril Feb 18, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
parser: Remove Options from unnormalized tokens
They are always set synchronously with normalized tokens now
  • Loading branch information
petrochenkov committed Feb 17, 2020
commit 06fbb0b4faefeaf70f4616d6af9bc0c1ebc69bc2
8 changes: 4 additions & 4 deletions src/librustc_parse/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use rustc_errors::{Diagnostic, FatalError, Level, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};
use syntax::ast;
use syntax::token::{self, Nonterminal};
use syntax::token::{self, Nonterminal, Token};
use syntax::tokenstream::{self, TokenStream, TokenTree};

use std::path::{Path, PathBuf};
Expand Down Expand Up @@ -170,9 +170,9 @@ fn maybe_source_file_to_parser(
let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
let mut parser = stream_to_parser(sess, stream, None);
parser.unclosed_delims = unclosed_delims;
if parser.token == token::Eof && parser.token.span.is_dummy() {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt());
assert!(parser.unnormalized_token.is_none());
if parser.token == token::Eof {
let span = Span::new(end_pos, end_pos, parser.token.span.ctxt());
parser.set_token(Token::new(token::Eof, span));
}

Ok(parser)
Expand Down
4 changes: 2 additions & 2 deletions src/librustc_parse/parser/expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ impl<'a> Parser<'a> {
while let Some(op) = self.check_assoc_op() {
// Adjust the span for interpolated LHS to point to the `$lhs` token
// and not to what it refers to.
let lhs_span = match self.unnormalized_prev_token().kind {
let lhs_span = match self.unnormalized_prev_token.kind {
TokenKind::Interpolated(..) => self.prev_span,
_ => lhs.span,
};
Expand Down Expand Up @@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, (Span, P<Expr>)> {
expr.map(|e| {
(
match self.unnormalized_prev_token().kind {
match self.unnormalized_prev_token.kind {
TokenKind::Interpolated(..) => self.prev_span,
_ => e.span,
},
Expand Down
55 changes: 19 additions & 36 deletions src/librustc_parse/parser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -93,18 +93,16 @@ pub struct Parser<'a> {
/// Use span from this token if you need an isolated span.
pub token: Token,
/// The current non-normalized token if it's different from `token`.
/// Preferable use is through the `unnormalized_token()` getter.
/// Use span from this token if you need to concatenate it with some neighbouring spans.
pub unnormalized_token: Option<Token>,
unnormalized_token: Token,
/// The previous normalized token.
/// Use span from this token if you need an isolated span.
prev_token: Token,
/// The previous non-normalized token if it's different from `prev_token`.
/// Preferable use is through the `unnormalized_prev_token()` getter.
/// Use span from this token if you need to concatenate it with some neighbouring spans.
unnormalized_prev_token: Option<Token>,
/// Equivalent to `unnormalized_prev_token().span`.
/// FIXME: Remove in favor of `(unnormalized_)prev_token().span`.
unnormalized_prev_token: Token,
/// Equivalent to `unnormalized_prev_token.span`.
/// FIXME: Remove in favor of `(unnormalized_)prev_token.span`.
pub prev_span: Span,
restrictions: Restrictions,
/// Used to determine the path to externally loaded source files.
Expand Down Expand Up @@ -378,9 +376,9 @@ impl<'a> Parser<'a> {
let mut parser = Parser {
sess,
token: Token::dummy(),
unnormalized_token: None,
unnormalized_token: Token::dummy(),
prev_token: Token::dummy(),
unnormalized_prev_token: None,
unnormalized_prev_token: Token::dummy(),
prev_span: DUMMY_SP,
restrictions: Restrictions::empty(),
recurse_into_file_modules,
Expand Down Expand Up @@ -422,14 +420,6 @@ impl<'a> Parser<'a> {
parser
}

fn unnormalized_token(&self) -> &Token {
self.unnormalized_token.as_ref().unwrap_or(&self.token)
}

fn unnormalized_prev_token(&self) -> &Token {
self.unnormalized_prev_token.as_ref().unwrap_or(&self.prev_token)
}

fn next_tok(&mut self, fallback_span: Span) -> Token {
let mut next = if self.desugar_doc_comments {
self.token_cursor.next_desugared()
Expand Down Expand Up @@ -899,18 +889,17 @@ impl<'a> Parser<'a> {
// Interpolated identifier (`$i: ident`) and lifetime (`$l: lifetime`)
// tokens are replaced with usual identifier and lifetime tokens,
// so the former are never encountered during normal parsing.
fn normalize_token(token: &Token) -> Option<Token> {
match &token.kind {
crate fn set_token(&mut self, token: Token) {
self.unnormalized_token = token;
self.token = match &self.unnormalized_token.kind {
token::Interpolated(nt) => match **nt {
token::NtIdent(ident, is_raw) => {
Some(Token::new(token::Ident(ident.name, is_raw), ident.span))
Token::new(token::Ident(ident.name, is_raw), ident.span)
}
token::NtLifetime(ident) => {
Some(Token::new(token::Lifetime(ident.name), ident.span))
}
_ => None,
token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
_ => self.unnormalized_token.clone(),
},
_ => None,
_ => self.unnormalized_token.clone(),
}
}

Expand All @@ -925,13 +914,11 @@ impl<'a> Parser<'a> {
// Update the current and previous tokens.
self.prev_token = self.token.take();
self.unnormalized_prev_token = self.unnormalized_token.take();
self.token = self.next_tok(self.unnormalized_prev_token().span);
if let Some(normalized_token) = Self::normalize_token(&self.token) {
self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
}
let next_token = self.next_tok(self.unnormalized_prev_token.span);
self.set_token(next_token);

// Update fields derived from the previous token.
self.prev_span = self.unnormalized_prev_token().span;
self.prev_span = self.unnormalized_prev_token.span;

self.expected_tokens.clear();
}
Expand All @@ -945,13 +932,10 @@ impl<'a> Parser<'a> {
// Update the current and previous tokens.
self.prev_token = self.token.take();
self.unnormalized_prev_token = self.unnormalized_token.take();
self.token = Token::new(next, span);
if let Some(normalized_token) = Self::normalize_token(&self.token) {
self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
}
self.set_token(Token::new(next, span));

// Update fields derived from the previous token.
self.prev_span = self.unnormalized_prev_token().span.with_hi(span.lo());
self.prev_span = self.unnormalized_prev_token.span.with_hi(span.lo());

self.expected_tokens.clear();
}
Expand Down Expand Up @@ -1096,8 +1080,7 @@ impl<'a> Parser<'a> {
&mut self.token_cursor.frame,
self.token_cursor.stack.pop().unwrap(),
);
self.token = Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close);
self.unnormalized_token = None;
self.set_token(Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close));
self.bump();
TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream.into())
}
Expand Down
2 changes: 1 addition & 1 deletion src/librustc_parse/parser/path.rs
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ impl<'a> Parser<'a> {
path
});

let lo = self.unnormalized_token().span;
let lo = self.unnormalized_token.span;
let mut segments = Vec::new();
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) {
Expand Down
0