mirror of
https://github.com/rust-lang/rust.git
synced 2026-04-27 18:57:42 +03:00
Reduce the number of panics in `{TokenStream, Literal}::from_str` calls
This commit is contained in:
@@ -1378,6 +1378,13 @@ pub fn cancel(mut self) {
|
||||
drop(self);
|
||||
}
|
||||
|
||||
/// Cancels this diagnostic and returns its first message, if it exists.
|
||||
pub fn cancel_into_message(self) -> Option<String> {
|
||||
let s = self.diag.as_ref()?.messages.get(0)?.0.as_str().map(ToString::to_string);
|
||||
self.cancel();
|
||||
s
|
||||
}
|
||||
|
||||
/// See `DiagCtxt::stash_diagnostic` for details.
|
||||
pub fn stash(mut self, span: Span, key: StashKey) -> Option<ErrorGuaranteed> {
|
||||
let diag = self.take_diag();
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan};
|
||||
use rustc_parse::lexer::{StripTokens, nfc_normalize};
|
||||
use rustc_parse::parser::Parser;
|
||||
use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
|
||||
use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream};
|
||||
use rustc_proc_macro::bridge::{
|
||||
DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree, server,
|
||||
};
|
||||
@@ -431,6 +431,13 @@ fn to_internal(self) -> rustc_errors::Level {
|
||||
}
|
||||
}
|
||||
|
||||
fn cancel_diags_into_string(diags: Vec<Diag<'_>>) -> String {
|
||||
let mut messages = diags.into_iter().flat_map(Diag::cancel_into_message);
|
||||
let msg = messages.next().expect("no diagnostic has a message");
|
||||
messages.for_each(|_| ()); // consume iterator to cancel the remaining diagnostics
|
||||
msg
|
||||
}
|
||||
|
||||
pub(crate) struct Rustc<'a, 'b> {
|
||||
ecx: &'a mut ExtCtxt<'b>,
|
||||
def_site: Span,
|
||||
@@ -494,35 +501,32 @@ fn track_path(&mut self, path: &str) {
|
||||
self.psess().file_depinfo.borrow_mut().insert(Symbol::intern(path));
|
||||
}
|
||||
|
||||
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, ()> {
|
||||
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, String> {
|
||||
let name = FileName::proc_macro_source_code(s);
|
||||
|
||||
let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(
|
||||
self.psess(),
|
||||
name,
|
||||
s.to_owned(),
|
||||
StripTokens::Nothing,
|
||||
));
|
||||
let mut parser =
|
||||
new_parser_from_source_str(self.psess(), name, s.to_owned(), StripTokens::Nothing)
|
||||
.map_err(cancel_diags_into_string)?;
|
||||
|
||||
let first_span = parser.token.span.data();
|
||||
let minus_present = parser.eat(exp!(Minus));
|
||||
|
||||
let lit_span = parser.token.span.data();
|
||||
let token::Literal(mut lit) = parser.token.kind else {
|
||||
return Err(());
|
||||
return Err("not a literal".to_string());
|
||||
};
|
||||
|
||||
// Check no comment or whitespace surrounding the (possibly negative)
|
||||
// literal, or more tokens after it.
|
||||
if (lit_span.hi.0 - first_span.lo.0) as usize != s.len() {
|
||||
return Err(());
|
||||
return Err("comment or whitespace around literal".to_string());
|
||||
}
|
||||
|
||||
if minus_present {
|
||||
// If minus is present, check no comment or whitespace in between it
|
||||
// and the literal token.
|
||||
if first_span.hi.0 != lit_span.lo.0 {
|
||||
return Err(());
|
||||
return Err("comment or whitespace after minus".to_string());
|
||||
}
|
||||
|
||||
// Check literal is a kind we allow to be negated in a proc macro token.
|
||||
@@ -536,7 +540,9 @@ fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symb
|
||||
| token::LitKind::ByteStrRaw(_)
|
||||
| token::LitKind::CStr
|
||||
| token::LitKind::CStrRaw(_)
|
||||
| token::LitKind::Err(_) => return Err(()),
|
||||
| token::LitKind::Err(_) => {
|
||||
return Err("non-numeric literal may not be negated".to_string());
|
||||
}
|
||||
token::LitKind::Integer | token::LitKind::Float => {}
|
||||
}
|
||||
|
||||
@@ -576,13 +582,14 @@ fn ts_is_empty(&mut self, stream: &Self::TokenStream) -> bool {
|
||||
stream.is_empty()
|
||||
}
|
||||
|
||||
fn ts_from_str(&mut self, src: &str) -> Self::TokenStream {
|
||||
unwrap_or_emit_fatal(source_str_to_stream(
|
||||
fn ts_from_str(&mut self, src: &str) -> Result<Self::TokenStream, String> {
|
||||
source_str_to_stream(
|
||||
self.psess(),
|
||||
FileName::proc_macro_source_code(src),
|
||||
src.to_string(),
|
||||
Some(self.call_site),
|
||||
))
|
||||
)
|
||||
.map_err(cancel_diags_into_string)
|
||||
}
|
||||
|
||||
fn ts_to_string(&mut self, stream: &Self::TokenStream) -> String {
|
||||
|
||||
@@ -37,14 +37,14 @@ macro_rules! with_api {
|
||||
fn injected_env_var(var: &str) -> Option<String>;
|
||||
fn track_env_var(var: &str, value: Option<&str>);
|
||||
fn track_path(path: &str);
|
||||
fn literal_from_str(s: &str) -> Result<Literal<$Span, $Symbol>, ()>;
|
||||
fn literal_from_str(s: &str) -> Result<Literal<$Span, $Symbol>, String>;
|
||||
fn emit_diagnostic(diagnostic: Diagnostic<$Span>);
|
||||
|
||||
fn ts_drop(stream: $TokenStream);
|
||||
fn ts_clone(stream: &$TokenStream) -> $TokenStream;
|
||||
fn ts_is_empty(stream: &$TokenStream) -> bool;
|
||||
fn ts_expand_expr(stream: &$TokenStream) -> Result<$TokenStream, ()>;
|
||||
fn ts_from_str(src: &str) -> $TokenStream;
|
||||
fn ts_from_str(src: &str) -> Result<$TokenStream, String>;
|
||||
fn ts_to_string(stream: &$TokenStream) -> String;
|
||||
fn ts_from_token_tree(
|
||||
tree: TokenTree<$TokenStream, $Span, $Symbol>,
|
||||
|
||||
@@ -110,15 +110,18 @@ impl !Send for TokenStream {}
|
||||
impl !Sync for TokenStream {}
|
||||
|
||||
/// Error returned from `TokenStream::from_str`.
|
||||
///
|
||||
/// The contained error message is explicitly not guaranteed to be stable in any way,
|
||||
/// and may change between Rust versions or across compilations.
|
||||
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
|
||||
#[non_exhaustive]
|
||||
#[derive(Debug)]
|
||||
pub struct LexError;
|
||||
pub struct LexError(String);
|
||||
|
||||
#[stable(feature = "proc_macro_lexerror_impls", since = "1.44.0")]
|
||||
impl fmt::Display for LexError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("cannot parse string into token stream")
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -197,7 +200,7 @@ impl FromStr for TokenStream {
|
||||
type Err = LexError;
|
||||
|
||||
fn from_str(src: &str) -> Result<TokenStream, LexError> {
|
||||
Ok(TokenStream(Some(BridgeMethods::ts_from_str(src))))
|
||||
Ok(TokenStream(Some(BridgeMethods::ts_from_str(src).map_err(LexError)?)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1594,7 +1597,7 @@ impl FromStr for Literal {
|
||||
fn from_str(src: &str) -> Result<Self, LexError> {
|
||||
match BridgeMethods::literal_from_str(src) {
|
||||
Ok(literal) => Ok(Literal(literal)),
|
||||
Err(()) => Err(LexError),
|
||||
Err(msg) => Err(LexError(msg)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -62,8 +62,9 @@ fn track_path(&mut self, path: &str) {
|
||||
self.tracked_paths.insert(path.into());
|
||||
}
|
||||
|
||||
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
|
||||
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, String> {
|
||||
literal_from_str(s, self.call_site)
|
||||
.map_err(|()| "cannot parse string into literal".to_string())
|
||||
}
|
||||
|
||||
fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {
|
||||
@@ -81,14 +82,9 @@ fn ts_clone(&mut self, stream: &Self::TokenStream) -> Self::TokenStream {
|
||||
fn ts_is_empty(&mut self, stream: &Self::TokenStream) -> bool {
|
||||
stream.is_empty()
|
||||
}
|
||||
fn ts_from_str(&mut self, src: &str) -> Self::TokenStream {
|
||||
Self::TokenStream::from_str(src, self.call_site).unwrap_or_else(|e| {
|
||||
Self::TokenStream::from_str(
|
||||
&format!("compile_error!(\"failed to parse str to token stream: {e}\")"),
|
||||
self.call_site,
|
||||
)
|
||||
.unwrap()
|
||||
})
|
||||
fn ts_from_str(&mut self, src: &str) -> Result<Self::TokenStream, String> {
|
||||
Self::TokenStream::from_str(src, self.call_site)
|
||||
.map_err(|e| format!("failed to parse str to token stream: {e}"))
|
||||
}
|
||||
fn ts_to_string(&mut self, stream: &Self::TokenStream) -> String {
|
||||
stream.to_string()
|
||||
|
||||
@@ -67,8 +67,9 @@ fn track_path(&mut self, path: &str) {
|
||||
self.tracked_paths.insert(path.into());
|
||||
}
|
||||
|
||||
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
|
||||
fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, String> {
|
||||
literal_from_str(s, self.call_site)
|
||||
.map_err(|()| "cannot parse string into literal".to_string())
|
||||
}
|
||||
|
||||
fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {}
|
||||
@@ -84,14 +85,9 @@ fn ts_clone(&mut self, stream: &Self::TokenStream) -> Self::TokenStream {
|
||||
fn ts_is_empty(&mut self, stream: &Self::TokenStream) -> bool {
|
||||
stream.is_empty()
|
||||
}
|
||||
fn ts_from_str(&mut self, src: &str) -> Self::TokenStream {
|
||||
Self::TokenStream::from_str(src, self.call_site).unwrap_or_else(|e| {
|
||||
Self::TokenStream::from_str(
|
||||
&format!("compile_error!(\"failed to parse str to token stream: {e}\")"),
|
||||
self.call_site,
|
||||
)
|
||||
.unwrap()
|
||||
})
|
||||
fn ts_from_str(&mut self, src: &str) -> Result<Self::TokenStream, String> {
|
||||
Self::TokenStream::from_str(src, self.call_site)
|
||||
.map_err(|e| format!("failed to parse str to token stream: {e}"))
|
||||
}
|
||||
fn ts_to_string(&mut self, stream: &Self::TokenStream) -> String {
|
||||
stream.to_string()
|
||||
|
||||
@@ -20,5 +20,9 @@ pub fn invalid_raw_ident(_: TokenStream) -> TokenStream {
|
||||
|
||||
#[proc_macro]
|
||||
pub fn lexer_failure(_: TokenStream) -> TokenStream {
|
||||
"a b ) c".parse().expect("parsing failed without panic")
|
||||
assert_eq!(
|
||||
"a b ) c".parse::<TokenStream>().unwrap_err().to_string(),
|
||||
"unexpected closing delimiter: `)`"
|
||||
);
|
||||
TokenStream::new()
|
||||
}
|
||||
|
||||
@@ -110,6 +110,10 @@ pub fn run() {
|
||||
lit("3//\n4", NormalErr);
|
||||
lit("18.u8E", NormalErr);
|
||||
lit("/*a*/ //", NormalErr);
|
||||
stream("1 ) 2", NormalErr);
|
||||
stream("( x [ ) ]", NormalErr);
|
||||
lit("1 ) 2", NormalErr);
|
||||
lit("( x [ ) ]", NormalErr);
|
||||
// FIXME: all of the cases below should return an Err and emit no diagnostics, but don't yet.
|
||||
|
||||
// emits diagnostics and returns LexError
|
||||
@@ -122,8 +126,6 @@ pub fn run() {
|
||||
|
||||
for parse in [stream as fn(&str, Mode), lit] {
|
||||
// emits diagnostic(s), then panics
|
||||
parse("1 ) 2", OtherWithPanic);
|
||||
parse("( x [ ) ]", OtherWithPanic);
|
||||
parse("r#", OtherWithPanic);
|
||||
|
||||
// emits diagnostic(s), then returns Ok(Literal { kind: ErrWithGuar, .. })
|
||||
|
||||
@@ -1,13 +1,9 @@
|
||||
//@ proc-macro: invalid-punct-ident.rs
|
||||
//@ needs-unwind proc macro panics to report errors
|
||||
//@ check-pass
|
||||
|
||||
#[macro_use]
|
||||
extern crate invalid_punct_ident;
|
||||
|
||||
lexer_failure!();
|
||||
//~^ ERROR proc macro panicked
|
||||
//~| ERROR unexpected closing delimiter: `)`
|
||||
|
||||
fn main() {
|
||||
let _recovery_witness: () = 0; //~ ERROR mismatched types
|
||||
}
|
||||
fn main() {}
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
error: unexpected closing delimiter: `)`
|
||||
--> $DIR/invalid-punct-ident-4.rs:7:1
|
||||
|
|
||||
LL | lexer_failure!();
|
||||
| ^^^^^^^^^^^^^^^^ unexpected closing delimiter
|
||||
|
|
||||
= note: this error originates in the macro `lexer_failure` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: proc macro panicked
|
||||
--> $DIR/invalid-punct-ident-4.rs:7:1
|
||||
|
|
||||
LL | lexer_failure!();
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-punct-ident-4.rs:12:33
|
||||
|
|
||||
LL | let _recovery_witness: () = 0;
|
||||
| -- ^ expected `()`, found integer
|
||||
| |
|
||||
| expected due to this
|
||||
|
||||
error: aborting due to 3 previous errors
|
||||
|
||||
For more information about this error, try `rustc --explain E0308`.
|
||||
@@ -40,26 +40,6 @@ LL | nonfatal_parsing::run!();
|
||||
= note: prefixed identifiers and literals are reserved since Rust 2021
|
||||
= note: this error originates in the macro `nonfatal_parsing::run` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: unexpected closing delimiter: `)`
|
||||
--> $DIR/nonfatal-parsing.rs:15:5
|
||||
|
|
||||
LL | nonfatal_parsing::run!();
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ unexpected closing delimiter
|
||||
|
|
||||
= note: this error originates in the macro `nonfatal_parsing::run` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: unexpected closing delimiter: `]`
|
||||
--> $DIR/nonfatal-parsing.rs:15:5
|
||||
|
|
||||
LL | nonfatal_parsing::run!();
|
||||
| -^^^^^^^^^^^^^^^^^^^^^^^
|
||||
| |
|
||||
| the nearest open delimiter
|
||||
| missing open `(` for this delimiter
|
||||
| unexpected closing delimiter
|
||||
|
|
||||
= note: this error originates in the macro `nonfatal_parsing::run` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: found invalid character; only `#` is allowed in raw string delimitation: \u{0}
|
||||
--> $DIR/nonfatal-parsing.rs:15:5
|
||||
|
|
||||
@@ -135,21 +115,6 @@ LL | nonfatal_parsing::run!();
|
||||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
= note: this error originates in the macro `nonfatal_parsing::run` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: unexpected closing delimiter: `)`
|
||||
--> <proc-macro source code>:1:3
|
||||
|
|
||||
LL | 1 ) 2
|
||||
| ^ unexpected closing delimiter
|
||||
|
||||
error: unexpected closing delimiter: `]`
|
||||
--> <proc-macro source code>:1:10
|
||||
|
|
||||
LL | ( x [ ) ]
|
||||
| - - ^ unexpected closing delimiter
|
||||
| | |
|
||||
| | missing open `(` for this delimiter
|
||||
| the nearest open delimiter
|
||||
|
||||
error: found invalid character; only `#` is allowed in raw string delimitation: \u{0}
|
||||
--> <proc-macro source code>:1:1
|
||||
|
|
||||
@@ -210,6 +175,6 @@ error: invalid digit for a base 2 literal
|
||||
LL | /*a*/ 0b2 //
|
||||
| ^
|
||||
|
||||
error: aborting due to 24 previous errors
|
||||
error: aborting due to 20 previous errors
|
||||
|
||||
For more information about this error, try `rustc --explain E0768`.
|
||||
|
||||
@@ -29,15 +29,19 @@ Ok(TokenStream [Literal { kind: Integer, symbol: "3", suffix: None, span: #44 by
|
||||
Ok(TokenStream [Literal { kind: Char, symbol: "c", suffix: None, span: #44 bytes(361..385) }])
|
||||
Ok(TokenStream [])
|
||||
### ERRORS
|
||||
Err(LexError)
|
||||
Err(LexError)
|
||||
Err(LexError)
|
||||
Err(LexError)
|
||||
Err(LexError)
|
||||
Err(LexError)
|
||||
Err(LexError)
|
||||
Err(LexError)
|
||||
Err(LexError)
|
||||
Err(LexError("comment or whitespace around literal"))
|
||||
Err(LexError("comment or whitespace around literal"))
|
||||
Err(LexError("comment or whitespace around literal"))
|
||||
Err(LexError("comment or whitespace around literal"))
|
||||
Err(LexError("comment or whitespace around literal"))
|
||||
Err(LexError("comment or whitespace around literal"))
|
||||
Err(LexError("not a literal"))
|
||||
Err(LexError("unexpected closing delimiter: `)`"))
|
||||
Err(LexError("unexpected closing delimiter: `]`"))
|
||||
Err(LexError("unexpected closing delimiter: `)`"))
|
||||
Err(LexError("unexpected closing delimiter: `]`"))
|
||||
Err(LexError("not a literal"))
|
||||
Err(LexError("not a literal"))
|
||||
Ok(TokenStream [Ident { ident: "r", span: #44 bytes(361..385) }, Literal { kind: Char, symbol: "r", suffix: None, span: #44 bytes(361..385) }])
|
||||
Ok(TokenStream [Ident { ident: "c", span: #44 bytes(361..385) }, Literal { kind: Char, symbol: "r", suffix: None, span: #44 bytes(361..385) }])
|
||||
Ok(TokenStream [Literal { kind: ErrWithGuar, symbol: "0b2", suffix: None, span: #44 bytes(361..385) }])
|
||||
@@ -51,4 +55,4 @@ Ok(Literal { kind: ErrWithGuar, symbol: "0b", suffix: Some("f32"), span: #44 byt
|
||||
Ok(Literal { kind: ErrWithGuar, symbol: "0b0.0", suffix: Some("f32"), span: #44 bytes(361..385) })
|
||||
Ok(Literal { kind: ErrWithGuar, symbol: "'''", suffix: None, span: #44 bytes(361..385) })
|
||||
Ok(Literal { kind: ErrWithGuar, symbol: "'\n'", suffix: None, span: #44 bytes(361..385) })
|
||||
Err(LexError)
|
||||
Err(LexError("comment or whitespace around literal"))
|
||||
|
||||
Reference in New Issue
Block a user