//! Basic syntax highlighting functionality.
//!
//! This module uses the compiler's lexer to provide token-based highlighting for
//! the HTML documentation generated by rustdoc.
//!
//! Use the `render_with_highlighting` function to highlight some Rust code.
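//!
//! For example, a caller might use it roughly like this (an illustrative
//! sketch only; the extra CSS class shown here is a made-up value):
//!
//! ```ignore
//! let html: String = render_with_highlighting(
//!     "fn main() {}",      // source to highlight
//!     Some("extra-class"), // optional extra CSS class for the <pre> block
//!     None,                // no playground button
//!     None,                // no tooltip
//! );
//! ```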

use crate::html::escape::Escape;

use std::fmt::Display;
use std::io;
use std::io::prelude::*;

use rustc_parse::lexer;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym};
use rustc_span::{FileName, Span};
use syntax::token::{self, Token};

/// Highlights `src`, returning the HTML output.
pub fn render_with_highlighting(
    src: &str,
    class: Option<&str>,
    playground_button: Option<&str>,
    tooltip: Option<(&str, &str)>,
) -> String {
    debug!("highlighting: ================\n{}\n==============", src);
    let mut out = Vec::new();
    if let Some((tooltip, class)) = tooltip {
        write!(
            out,
            "<div class='information'><div class='tooltip {}'>ⓘ<span \
             class='tooltiptext'>{}</span></div></div>",
            class, tooltip
        )
        .unwrap();
    }

    let sess = ParseSess::with_silent_emitter();
    let sf = sess
        .source_map()
        .new_source_file(FileName::Custom(String::from("rustdoc-highlighting")), src.to_owned());
    let highlight_result = rustc_driver::catch_fatal_errors(|| {
        let lexer = lexer::StringReader::new(&sess, sf, None);
        let mut classifier = Classifier::new(lexer, sess.source_map());

        let mut highlighted_source = vec![];
        if classifier.write_source(&mut highlighted_source).is_err() {
            Err(())
        } else {
            Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
        }
    })
    .unwrap_or(Err(()));

    match highlight_result {
        Ok(highlighted_source) => {
            write_header(class, &mut out).unwrap();
            write!(out, "{}", highlighted_source).unwrap();
            write_footer(&mut out, playground_button).unwrap();
        }
        Err(()) => {
            // If errors are encountered while trying to highlight, just emit
            // the unhighlighted source.
            write!(out, "<pre><code>{}</code></pre>", Escape(src)).unwrap();
        }
    }

    String::from_utf8_lossy(&out[..]).into_owned()
}

/// Processes a program (nested in the internal `lexer`), classifying strings of
/// text by highlighting category (`Class`). Calls out to a `Writer` to write
/// each span of text in sequence.
struct Classifier<'a> {
    lexer: lexer::StringReader<'a>,
    peek_token: Option<Token>,
    source_map: &'a SourceMap,

    // State of the classifier.
    in_attribute: bool,
    in_macro: bool,
    in_macro_nonterminal: bool,
}

/// How a span of text is classified. Mostly corresponds to token kinds.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum Class {
    None,
    Comment,
    DocComment,
    Attribute,
    KeyWord,
    // Keywords that do pointer/reference stuff.
    RefKeyWord,
    Self_,
    Op,
    Macro,
    MacroNonTerminal,
    String,
    Number,
    Bool,
    Ident,
    Lifetime,
    PreludeTy,
    PreludeVal,
    QuestionMark,
}

/// Trait that controls writing the output of syntax highlighting. Users should
/// implement this trait to customize writing output.
///
/// The classifier will call into the `Writer` implementation as it finds spans
/// of text to highlight. Exactly how that text should be highlighted is up to
/// the implementation.
trait Writer {
    /// Called when we start processing a span of text that should be highlighted.
    /// The `Class` argument specifies how it should be highlighted.
    fn enter_span(&mut self, _: Class) -> io::Result<()>;

    /// Called at the end of a span of highlighted text.
    fn exit_span(&mut self) -> io::Result<()>;

    /// Called for a span of text. If the text should be highlighted differently from the
    /// surrounding text, then the `Class` argument will be a value other than `None`.
    ///
    /// The following sequences of callbacks are equivalent:
    /// ```plain
    ///     enter_span(Foo), string("text", None), exit_span()
    ///     string("text", Foo)
    /// ```
    /// The latter can be thought of as a shorthand for the former, which is
    /// more flexible.
    fn string<T: Display>(&mut self, text: T, klass: Class) -> io::Result<()>;
}

// Implement `Writer` for anything that can be written to; this just implements
// the default rustdoc behaviour.
impl<U: Write> Writer for U {
    fn string<T: Display>(&mut self, text: T, klass: Class) -> io::Result<()> {
        match klass {
            Class::None => write!(self, "{}", text),
            klass => write!(self, "<span class=\"{}\">{}</span>", klass.rustdoc_class(), text),
        }
    }

    fn enter_span(&mut self, klass: Class) -> io::Result<()> {
        write!(self, "<span class=\"{}\">", klass.rustdoc_class())
    }

    fn exit_span(&mut self) -> io::Result<()> {
        write!(self, "</span>")
    }
}

enum HighlightError {
    LexError,
    IoError(io::Error),
}

impl From<io::Error> for HighlightError {
    fn from(err: io::Error) -> Self {
        HighlightError::IoError(err)
    }
}

impl<'a> Classifier<'a> {
    fn new(lexer: lexer::StringReader<'a>, source_map: &'a SourceMap) -> Classifier<'a> {
        Classifier {
            lexer,
            peek_token: None,
            source_map,
            in_attribute: false,
            in_macro: false,
            in_macro_nonterminal: false,
        }
    }

    /// Gets the next token out of the lexer.
    fn try_next_token(&mut self) -> Result<Token, HighlightError> {
        if let Some(token) = self.peek_token.take() {
            return Ok(token);
        }
        let token = self.lexer.next_token();
        if let token::Unknown(..) = &token.kind {
            return Err(HighlightError::LexError);
        }
        Ok(token)
    }

    /// Peeks at the next token without consuming it, lexing and caching it
    /// if it has not been seen yet.
    fn peek(&mut self) -> Result<&Token, HighlightError> {
        if self.peek_token.is_none() {
            let token = self.lexer.next_token();
            if let token::Unknown(..) = &token.kind {
                return Err(HighlightError::LexError);
            }
            self.peek_token = Some(token);
        }
        Ok(self.peek_token.as_ref().unwrap())
    }

    /// Exhausts the `lexer`, writing the output into `out`.
    ///
    /// The general structure for this method is to iterate over each token,
    /// possibly giving it an HTML span with a class specifying what flavor of token
    /// is used. All source code emission is done as slices from the source map,
    /// not from the tokens themselves, in order to stay true to the original
    /// source.
    fn write_source<W: Writer>(&mut self, out: &mut W) -> Result<(), HighlightError> {
        loop {
            let next = self.try_next_token()?;
            if next == token::Eof {
                break;
            }

            self.write_token(out, next)?;
        }

        Ok(())
    }

    // Handles an individual token from the lexer.
    fn write_token<W: Writer>(&mut self, out: &mut W, token: Token) -> Result<(), HighlightError> {
        let klass = match token.kind {
            token::Shebang(s) => {
                out.string(Escape(&s.as_str()), Class::None)?;
                return Ok(());
            }

            token::Whitespace | token::Unknown(..) => Class::None,
            token::Comment => Class::Comment,
            token::DocComment(..) => Class::DocComment,

            // If this '&' or '*' token is followed by a non-whitespace token, assume that it's the
            // reference or dereference operator or a reference or pointer type, instead of the
            // bit-and or multiplication operator.
            token::BinOp(token::And) | token::BinOp(token::Star)
                if self.peek()? != &token::Whitespace =>
            {
                Class::RefKeyWord
            }

            // Consider this as part of a macro invocation if there was a
            // leading identifier.
            token::Not if self.in_macro => {
                self.in_macro = false;
                Class::Macro
            }

            // Operators.
            token::Eq
            | token::Lt
            | token::Le
            | token::EqEq
            | token::Ne
            | token::Ge
            | token::Gt
            | token::AndAnd
            | token::OrOr
            | token::Not
            | token::BinOp(..)
            | token::RArrow
            | token::BinOpEq(..)
            | token::FatArrow => Class::Op,

            // Miscellaneous, no highlighting.
            token::Dot
            | token::DotDot
            | token::DotDotDot
            | token::DotDotEq
            | token::Comma
            | token::Semi
            | token::Colon
            | token::ModSep
            | token::LArrow
            | token::OpenDelim(_)
            | token::CloseDelim(token::Brace)
            | token::CloseDelim(token::Paren)
            | token::CloseDelim(token::NoDelim) => Class::None,

            token::Question => Class::QuestionMark,

            token::Dollar => {
                if self.peek()?.is_ident() {
                    self.in_macro_nonterminal = true;
                    Class::MacroNonTerminal
                } else {
                    Class::None
                }
            }

            // This might be the start of an attribute. We're going to want to
            // continue highlighting it as an attribute until the ending ']' is
            // seen, so skip out early. Down below we terminate the attribute
            // span when we see the ']'.
            token::Pound => {
                // We can't be sure that our # begins an attribute (it could
                // just be appearing in a macro) until we read either `#![` or
                // `#[` from the input stream.
                //
                // We don't want to start highlighting as an attribute until
                // we're confident there is going to be a ] coming up, as
                // otherwise # tokens in macros highlight the rest of the input
                // as an attribute.

                // Case 1: #![inner_attribute]
                if self.peek()? == &token::Not {
                    self.try_next_token()?; // NOTE: consumes `!` token!
                    if self.peek()? == &token::OpenDelim(token::Bracket) {
                        self.in_attribute = true;
                        out.enter_span(Class::Attribute)?;
                    }
                    out.string("#", Class::None)?;
                    out.string("!", Class::None)?;
                    return Ok(());
                }

                // Case 2: #[outer_attribute]
                if self.peek()? == &token::OpenDelim(token::Bracket) {
                    self.in_attribute = true;
                    out.enter_span(Class::Attribute)?;
                }
                out.string("#", Class::None)?;
                return Ok(());
            }
            token::CloseDelim(token::Bracket) => {
                if self.in_attribute {
                    self.in_attribute = false;
                    out.string("]", Class::None)?;
                    out.exit_span()?;
                    return Ok(());
                } else {
                    Class::None
                }
            }

            token::Literal(lit) => {
                match lit.kind {
                    // Text literals.
                    token::Byte
                    | token::Char
                    | token::Err
                    | token::ByteStr
                    | token::ByteStrRaw(..)
                    | token::Str
                    | token::StrRaw(..) => Class::String,

                    // Number literals.
                    token::Integer | token::Float => Class::Number,

                    token::Bool => panic!("literal token contains `Lit::Bool`"),
                }
            }

            // Keywords are also included in the identifier set.
            token::Ident(name, is_raw) => match name {
                kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord,

                kw::SelfLower | kw::SelfUpper => Class::Self_,
                kw::False | kw::True if !is_raw => Class::Bool,

                sym::Option | sym::Result => Class::PreludeTy,
                sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,

                _ if token.is_reserved_ident() => Class::KeyWord,

                _ => {
                    if self.in_macro_nonterminal {
                        self.in_macro_nonterminal = false;
                        Class::MacroNonTerminal
                    } else if self.peek()? == &token::Not {
                        self.in_macro = true;
                        Class::Macro
                    } else {
                        Class::Ident
                    }
                }
            },

            token::Lifetime(..) => Class::Lifetime,

            token::Eof
            | token::Interpolated(..)
            | token::Tilde
            | token::At
            | token::SingleQuote => Class::None,
        };

        // Anything that didn't return above is the simple case where the
        // class just spans a single token, so we can use the `string` method.
        out.string(Escape(&self.snip(token.span)), klass)?;

        Ok(())
    }

    // Helper function to get a snippet from the source_map.
    fn snip(&self, sp: Span) -> String {
        self.source_map.span_to_snippet(sp).unwrap()
    }
}

impl Class {
    /// Returns the CSS class expected by rustdoc for each `Class`.
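    ///
    /// For instance (illustrative only; this is a private rustdoc API):
    ///
    /// ```ignore
    /// assert_eq!(Class::KeyWord.rustdoc_class(), "kw");
    /// assert_eq!(Class::QuestionMark.rustdoc_class(), "question-mark");
    /// ```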
    fn rustdoc_class(self) -> &'static str {
        match self {
            Class::None => "",
            Class::Comment => "comment",
            Class::DocComment => "doccomment",
            Class::Attribute => "attribute",
            Class::KeyWord => "kw",
            Class::RefKeyWord => "kw-2",
            Class::Self_ => "self",
            Class::Op => "op",
            Class::Macro => "macro",
            Class::MacroNonTerminal => "macro-nonterminal",
            Class::String => "string",
            Class::Number => "number",
            Class::Bool => "bool-val",
            Class::Ident => "ident",
            Class::Lifetime => "lifetime",
            Class::PreludeTy => "prelude-ty",
            Class::PreludeVal => "prelude-val",
            Class::QuestionMark => "question-mark",
        }
    }
}

fn write_header(class: Option<&str>, out: &mut dyn Write) -> io::Result<()> {
    write!(out, "<div class=\"example-wrap\"><pre class=\"rust {}\">\n", class.unwrap_or(""))
}

fn write_footer(out: &mut dyn Write, playground_button: Option<&str>) -> io::Result<()> {
    write!(out, "</pre>{}</div>\n", if let Some(button) = playground_button { button } else { "" })
}