
Commit cda7c1c

Auto merge of #33199 - mitaa:tokenize-responsibly, r=nrc
Make some fatal lexer errors recoverable

I've kept the changes to a minimum since I'm not really sure if this approach is acceptable.

fixes #12834
cc @nrc
2 parents b52d76a + 6887202 commit cda7c1c

File tree

5 files changed: +148 -74 lines changed

src/librustdoc/html/highlight.rs (+19 -7)

@@ -29,25 +29,27 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>

     let mut out = Vec::new();
     write_header(class, id, &mut out).unwrap();
-    write_source(&sess,
-                 lexer::StringReader::new(&sess.span_diagnostic, fm),
-                 &mut out).unwrap();
+    if let Err(_) = write_source(&sess,
+                                 lexer::StringReader::new(&sess.span_diagnostic, fm),
+                                 &mut out) {
+        return format!("<pre>{}</pre>", src)
+    }
     write_footer(&mut out).unwrap();
     String::from_utf8_lossy(&out[..]).into_owned()
 }

 /// Highlights `src`, returning the HTML output. Returns only the inner html to
 /// be inserted into an element. C.f., `render_with_highlighting` which includes
 /// an enclosing `<pre>` block.
-pub fn render_inner_with_highlighting(src: &str) -> String {
+pub fn render_inner_with_highlighting(src: &str) -> io::Result<String> {
     let sess = parse::ParseSess::new();
     let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());

     let mut out = Vec::new();
     write_source(&sess,
                  lexer::StringReader::new(&sess.span_diagnostic, fm),
-                 &mut out).unwrap();
-    String::from_utf8_lossy(&out[..]).into_owned()
+                 &mut out)?;
+    Ok(String::from_utf8_lossy(&out[..]).into_owned())
 }

 /// Exhausts the `lexer` writing the output into `out`.
@@ -65,7 +67,17 @@ fn write_source(sess: &parse::ParseSess,
     let mut is_macro = false;
     let mut is_macro_nonterminal = false;
     loop {
-        let next = lexer.next_token();
+        let next = match lexer.try_next_token() {
+            Ok(tok) => tok,
+            Err(_) => {
+                lexer.emit_fatal_errors();
+                lexer.span_diagnostic.struct_warn("Backing out of syntax highlighting")
+                                     .note("You probably did not intend to render this \
+                                            as a rust code-block")
+                                     .emit();
+                return Err(io::Error::new(io::ErrorKind::Other, ""))
+            },
+        };

         let snip = |sp| sess.codemap().span_to_snippet(sp).unwrap();

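The net effect of the highlight.rs change: a fatal lexer error no longer panics through `unwrap()`; `write_source` reports `Err` and `render_with_highlighting` backs out, returning the raw source in a plain `<pre>` block. A minimal, self-contained sketch of that fallback pattern (the `Lexer` type and its `try_next_token` below are illustrative stand-ins, not the libsyntax lexer; HTML escaping is omitted for brevity):

// Illustrative stand-in for a lexer whose errors are recoverable by the caller.
struct Lexer<'a> {
    rest: &'a str,
}

impl<'a> Lexer<'a> {
    // Returns Err on bad input instead of aborting the whole process.
    fn try_next_token(&mut self) -> Result<Option<&'a str>, String> {
        let rest = self.rest.trim_start();
        if rest.is_empty() {
            return Ok(None); // end of input
        }
        if rest.starts_with('\u{0}') {
            return Err("unexpected NUL byte".to_string()); // treated as a fatal error
        }
        let end = rest.find(char::is_whitespace).unwrap_or(rest.len());
        self.rest = &rest[end..];
        Ok(Some(&rest[..end]))
    }
}

// Highlight `src`, but back out to a plain <pre> block if lexing fails,
// mirroring the new error path in render_with_highlighting.
fn render_with_fallback(src: &str) -> String {
    let mut lexer = Lexer { rest: src };
    let mut out = String::from("<pre class=\"rust\">");
    loop {
        match lexer.try_next_token() {
            Ok(Some(tok)) => {
                out.push_str("<span>");
                out.push_str(tok);
                out.push_str("</span> ");
            }
            Ok(None) => break,
            Err(_) => return format!("<pre>{}</pre>", src), // recoverable: degrade gracefully
        }
    }
    out.push_str("</pre>");
    out
}

fn main() {
    println!("{}", render_with_fallback("fn main ( ) { }"));
}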

src/libsyntax/errors/mod.rs (+2 -0)

@@ -177,6 +177,7 @@ impl error::Error for ExplicitBug {

 /// Used for emitting structured error messages and other diagnostic information.
 #[must_use]
+#[derive(Clone)]
 pub struct DiagnosticBuilder<'a> {
     emitter: &'a RefCell<Box<Emitter>>,
     level: Level,
@@ -187,6 +188,7 @@ pub struct DiagnosticBuilder<'a> {
 }

 /// For example a note attached to an error.
+#[derive(Clone)]
 struct SubDiagnostic {
     level: Level,
     message: String,
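`DiagnosticBuilder` and `SubDiagnostic` gain `#[derive(Clone)]` so a diagnostic can be held onto, and duplicated if needed, rather than emitted at the point it is built. A hedged sketch of what Clone buys, using a stand-in `Diag` type rather than the real `DiagnosticBuilder`:

// Stand-in diagnostic; deriving Clone mirrors the change to DiagnosticBuilder.
#[derive(Clone, Debug)]
struct Diag {
    level: String,
    message: String,
}

fn main() {
    let pending = Diag {
        level: "fatal".to_string(),
        message: "unterminated string literal".to_string(),
    };

    // A copy can be buffered for later while the original stays available.
    let mut buffered: Vec<Diag> = Vec::new();
    buffered.push(pending.clone());

    // Later on, the consumer decides whether the buffered diagnostics are reported.
    for d in &buffered {
        eprintln!("{}: {}", d.level, d.message);
    }
    println!("original still usable: {:?}", pending);
}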

src/libsyntax/ext/tt/transcribe.rs (+3 -1)

@@ -12,7 +12,7 @@ use self::LockstepIterSize::*;
 use ast;
 use ast::{TokenTree, Ident, Name};
 use codemap::{Span, DUMMY_SP};
-use errors::Handler;
+use errors::{Handler, DiagnosticBuilder};
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use parse::token::{DocComment, MatchNt, SubstNt};
 use parse::token::{Token, NtIdent, SpecialMacroVar};
@@ -50,6 +50,7 @@ pub struct TtReader<'a> {
     pub cur_span: Span,
     /// Transform doc comments. Only useful in macro invocations
     pub desugar_doc_comments: bool,
+    pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
 }

 /// This can do Macro-By-Example transcription. On the other hand, if
@@ -99,6 +100,7 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
         /* dummy values, never read: */
         cur_tok: token::Eof,
         cur_span: DUMMY_SP,
+        fatal_errs: Vec::new(),
     };
     tt_next_token(&mut r); /* get cur_tok and cur_span set up */
     r
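The new `fatal_errs` field gives the reader somewhere to park a fatal diagnostic so it can report failure through a `Result` instead of aborting, leaving the caller to emit the buffered errors (as highlight.rs now does via `emit_fatal_errors`) or discard them. A self-contained sketch of that flow, with the hypothetical `SimpleReader`/`SimpleDiag` standing in for `TtReader` and `DiagnosticBuilder`:

#[derive(Clone)]
struct SimpleDiag {
    message: String,
}

// Stand-in for a token reader that buffers fatal errors instead of exiting the process.
struct SimpleReader<'a> {
    tokens: std::str::SplitWhitespace<'a>,
    fatal_errs: Vec<SimpleDiag>,
}

impl<'a> SimpleReader<'a> {
    fn new(src: &'a str) -> SimpleReader<'a> {
        SimpleReader { tokens: src.split_whitespace(), fatal_errs: Vec::new() }
    }

    // On bad input, park a diagnostic in fatal_errs and let the caller react to the Err.
    fn try_next_token(&mut self) -> Result<Option<&'a str>, ()> {
        match self.tokens.next() {
            None => Ok(None),
            Some(t) if t.contains('\u{0}') => {
                self.fatal_errs.push(SimpleDiag { message: format!("invalid token {:?}", t) });
                Err(())
            }
            Some(t) => Ok(Some(t)),
        }
    }

    // The caller chooses if and when the buffered fatal errors are actually reported.
    fn emit_fatal_errors(&mut self) {
        for e in self.fatal_errs.drain(..) {
            eprintln!("error: {}", e.message);
        }
    }
}

fn main() {
    let mut reader = SimpleReader::new("macro_rules ! foo");
    while let Ok(Some(tok)) = reader.try_next_token() {
        println!("token: {}", tok);
    }
    reader.emit_fatal_errors(); // nothing buffered for this input
}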
