RPM build fix (reverts CI changes, which will need to be un-reverted or made conditional), and vendor Rust dependencies to make builds much faster in any CI system.
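For context, the vendoring this commit adds follows the standard cargo workflow; a minimal sketch (the zeroidc/ path matches this repo's layout, the exact CI invocation is an assumption):

    cd zeroidc
    cargo vendor vendor      # populates zeroidc/vendor/ with the crate sources added below
    # `cargo vendor` prints a config snippet to put in .cargo/config.toml, roughly:
    #   [source.crates-io]
    #   replace-with = "vendored-sources"
    #   [source.vendored-sources]
    #   directory = "vendor"
    # With that in place, CI can run `cargo build --offline` and never hit crates.io.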
75  zeroidc/vendor/proc-macro2/src/detection.rs  (vendored, new file)
@@ -0,0 +1,75 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Once;

static WORKS: AtomicUsize = AtomicUsize::new(0);
static INIT: Once = Once::new();

pub(crate) fn inside_proc_macro() -> bool {
    match WORKS.load(Ordering::Relaxed) {
        1 => return false,
        2 => return true,
        _ => {}
    }

    INIT.call_once(initialize);
    inside_proc_macro()
}

pub(crate) fn force_fallback() {
    WORKS.store(1, Ordering::Relaxed);
}

pub(crate) fn unforce_fallback() {
    initialize();
}

#[cfg(not(no_is_available))]
fn initialize() {
    let available = proc_macro::is_available();
    WORKS.store(available as usize + 1, Ordering::Relaxed);
}

// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
// then use catch_unwind to determine whether the compiler's proc_macro is
// working. When proc-macro2 is used from outside of a procedural macro all
// of the proc_macro crate's APIs currently panic.
//
// The Once is to prevent the possibility of this ordering:
//
//     thread 1 calls take_hook, gets the user's original hook
//     thread 1 calls set_hook with the null hook
//     thread 2 calls take_hook, thinks null hook is the original hook
//     thread 2 calls set_hook with the null hook
//     thread 1 calls set_hook with the actual original hook
//     thread 2 calls set_hook with what it thinks is the original hook
//
// in which the user's hook has been lost.
//
// There is still a race condition where a panic in a different thread can
// happen during the interval that the user's original panic hook is
// unregistered such that their hook is incorrectly not called. This is
// sufficiently unlikely and less bad than printing panic messages to stderr
// on correct use of this crate. Maybe there is a libstd feature request
// here. For now, if a user needs to guarantee that this failure mode does
// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
// the main thread before launching any other threads.
#[cfg(no_is_available)]
fn initialize() {
    use std::panic::{self, PanicInfo};

    type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;

    let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
    let sanity_check = &*null_hook as *const PanicHook;
    let original_hook = panic::take_hook();
    panic::set_hook(null_hook);

    let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
    WORKS.store(works as usize + 1, Ordering::Relaxed);

    let hopefully_null_hook = panic::take_hook();
    panic::set_hook(original_hook);
    if sanity_check != &*hopefully_null_hook {
        panic!("observed race condition in proc_macro2::inside_proc_macro");
    }
}
937  zeroidc/vendor/proc-macro2/src/fallback.rs  (vendored, new file)
@@ -0,0 +1,937 @@
use crate::parse::{self, Cursor};
use crate::{Delimiter, Spacing, TokenTree};
#[cfg(span_locations)]
use std::cell::RefCell;
#[cfg(span_locations)]
use std::cmp;
use std::fmt::{self, Debug, Display, Write};
use std::iter::FromIterator;
use std::mem;
use std::ops::RangeBounds;
#[cfg(procmacro2_semver_exempt)]
use std::path::Path;
use std::path::PathBuf;
use std::str::FromStr;
use std::vec;

/// Force use of proc-macro2's fallback implementation of the API for now, even
/// if the compiler's implementation is available.
pub fn force() {
    #[cfg(wrap_proc_macro)]
    crate::detection::force_fallback();
}

/// Resume using the compiler's implementation of the proc macro API if it is
/// available.
pub fn unforce() {
    #[cfg(wrap_proc_macro)]
    crate::detection::unforce_fallback();
}

#[derive(Clone)]
pub(crate) struct TokenStream {
    inner: Vec<TokenTree>,
}

#[derive(Debug)]
pub(crate) struct LexError {
    pub(crate) span: Span,
}

impl LexError {
    pub(crate) fn span(&self) -> Span {
        self.span
    }

    fn call_site() -> Self {
        LexError {
            span: Span::call_site(),
        }
    }
}

impl TokenStream {
    pub fn new() -> Self {
        TokenStream { inner: Vec::new() }
    }

    pub fn is_empty(&self) -> bool {
        self.inner.len() == 0
    }

    fn take_inner(&mut self) -> Vec<TokenTree> {
        mem::replace(&mut self.inner, Vec::new())
    }

    fn push_token(&mut self, token: TokenTree) {
        // https://github.com/dtolnay/proc-macro2/issues/235
        match token {
            #[cfg(not(no_bind_by_move_pattern_guard))]
            TokenTree::Literal(crate::Literal {
                #[cfg(wrap_proc_macro)]
                inner: crate::imp::Literal::Fallback(literal),
                #[cfg(not(wrap_proc_macro))]
                inner: literal,
                ..
            }) if literal.repr.starts_with('-') => {
                push_negative_literal(self, literal);
            }
            #[cfg(no_bind_by_move_pattern_guard)]
            TokenTree::Literal(crate::Literal {
                #[cfg(wrap_proc_macro)]
                inner: crate::imp::Literal::Fallback(literal),
                #[cfg(not(wrap_proc_macro))]
                inner: literal,
                ..
            }) => {
                if literal.repr.starts_with('-') {
                    push_negative_literal(self, literal);
                } else {
                    self.inner
                        .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
                }
            }
            _ => self.inner.push(token),
        }

        #[cold]
        fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
            literal.repr.remove(0);
            let mut punct = crate::Punct::new('-', Spacing::Alone);
            punct.set_span(crate::Span::_new_stable(literal.span));
            stream.inner.push(TokenTree::Punct(punct));
            stream
                .inner
                .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
        }
    }
}

impl From<Vec<TokenTree>> for TokenStream {
    fn from(inner: Vec<TokenTree>) -> Self {
        TokenStream { inner }
    }
}

// Nonrecursive to prevent stack overflow.
impl Drop for TokenStream {
    fn drop(&mut self) {
        while let Some(token) = self.inner.pop() {
            let group = match token {
                TokenTree::Group(group) => group.inner,
                _ => continue,
            };
            #[cfg(wrap_proc_macro)]
            let group = match group {
                crate::imp::Group::Fallback(group) => group,
                crate::imp::Group::Compiler(_) => continue,
            };
            let mut group = group;
            self.inner.extend(group.stream.take_inner());
        }
    }
}

#[cfg(span_locations)]
fn get_cursor(src: &str) -> Cursor {
    // Create a dummy file & add it to the source map
    SOURCE_MAP.with(|cm| {
        let mut cm = cm.borrow_mut();
        let name = format!("<parsed string {}>", cm.files.len());
        let span = cm.add_file(&name, src);
        Cursor {
            rest: src,
            off: span.lo,
        }
    })
}

#[cfg(not(span_locations))]
fn get_cursor(src: &str) -> Cursor {
    Cursor { rest: src }
}

impl FromStr for TokenStream {
    type Err = LexError;

    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        // Create a dummy file & add it to the source map
        let cursor = get_cursor(src);

        parse::token_stream(cursor)
    }
}

impl Display for LexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("cannot parse string into token stream")
    }
}

impl Display for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut joint = false;
        for (i, tt) in self.inner.iter().enumerate() {
            if i != 0 && !joint {
                write!(f, " ")?;
            }
            joint = false;
            match tt {
                TokenTree::Group(tt) => Display::fmt(tt, f),
                TokenTree::Ident(tt) => Display::fmt(tt, f),
                TokenTree::Punct(tt) => {
                    joint = tt.spacing() == Spacing::Joint;
                    Display::fmt(tt, f)
                }
                TokenTree::Literal(tt) => Display::fmt(tt, f),
            }?;
        }

        Ok(())
    }
}

impl Debug for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("TokenStream ")?;
        f.debug_list().entries(self.clone()).finish()
    }
}

#[cfg(use_proc_macro)]
impl From<proc_macro::TokenStream> for TokenStream {
    fn from(inner: proc_macro::TokenStream) -> TokenStream {
        inner
            .to_string()
            .parse()
            .expect("compiler token stream parse failed")
    }
}

#[cfg(use_proc_macro)]
impl From<TokenStream> for proc_macro::TokenStream {
    fn from(inner: TokenStream) -> proc_macro::TokenStream {
        inner
            .to_string()
            .parse()
            .expect("failed to parse to compiler tokens")
    }
}

impl From<TokenTree> for TokenStream {
    fn from(tree: TokenTree) -> TokenStream {
        let mut stream = TokenStream::new();
        stream.push_token(tree);
        stream
    }
}

impl FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
        let mut stream = TokenStream::new();
        stream.extend(tokens);
        stream
    }
}

impl FromIterator<TokenStream> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
        let mut v = Vec::new();

        for mut stream in streams {
            v.extend(stream.take_inner());
        }

        TokenStream { inner: v }
    }
}

impl Extend<TokenTree> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
        tokens.into_iter().for_each(|token| self.push_token(token));
    }
}

impl Extend<TokenStream> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
        self.inner.extend(streams.into_iter().flatten());
    }
}

pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;

impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenTreeIter;

    fn into_iter(mut self) -> TokenTreeIter {
        self.take_inner().into_iter()
    }
}

#[derive(Clone, PartialEq, Eq)]
pub(crate) struct SourceFile {
    path: PathBuf,
}

impl SourceFile {
    /// Get the path to this source file as a string.
    pub fn path(&self) -> PathBuf {
        self.path.clone()
    }

    pub fn is_real(&self) -> bool {
        // XXX(nika): Support real files in the future?
        false
    }
}

impl Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SourceFile")
            .field("path", &self.path())
            .field("is_real", &self.is_real())
            .finish()
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) struct LineColumn {
    pub line: usize,
    pub column: usize,
}

#[cfg(span_locations)]
thread_local! {
    static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
        // NOTE: We start with a single dummy file which all call_site() and
        // def_site() spans reference.
        files: vec![FileInfo {
            #[cfg(procmacro2_semver_exempt)]
            name: "<unspecified>".to_owned(),
            span: Span { lo: 0, hi: 0 },
            lines: vec![0],
        }],
    });
}

#[cfg(span_locations)]
struct FileInfo {
    #[cfg(procmacro2_semver_exempt)]
    name: String,
    span: Span,
    lines: Vec<usize>,
}

#[cfg(span_locations)]
impl FileInfo {
    fn offset_line_column(&self, offset: usize) -> LineColumn {
        assert!(self.span_within(Span {
            lo: offset as u32,
            hi: offset as u32
        }));
        let offset = offset - self.span.lo as usize;
        match self.lines.binary_search(&offset) {
            Ok(found) => LineColumn {
                line: found + 1,
                column: 0,
            },
            Err(idx) => LineColumn {
                line: idx,
                column: offset - self.lines[idx - 1],
            },
        }
    }

    fn span_within(&self, span: Span) -> bool {
        span.lo >= self.span.lo && span.hi <= self.span.hi
    }
}

/// Computes the offsets of each line in the given source string
/// and the total number of characters
#[cfg(span_locations)]
fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
    let mut lines = vec![0];
    let mut total = 0;

    for ch in s.chars() {
        total += 1;
        if ch == '\n' {
            lines.push(total);
        }
    }

    (total, lines)
}

#[cfg(span_locations)]
struct SourceMap {
    files: Vec<FileInfo>,
}

#[cfg(span_locations)]
impl SourceMap {
    fn next_start_pos(&self) -> u32 {
        // Add 1 so there's always space between files.
        //
        // We'll always have at least 1 file, as we initialize our files list
        // with a dummy file.
        self.files.last().unwrap().span.hi + 1
    }

    fn add_file(&mut self, name: &str, src: &str) -> Span {
        let (len, lines) = lines_offsets(src);
        let lo = self.next_start_pos();
        // XXX(nika): Shouild we bother doing a checked cast or checked add here?
        let span = Span {
            lo,
            hi: lo + (len as u32),
        };

        self.files.push(FileInfo {
            #[cfg(procmacro2_semver_exempt)]
            name: name.to_owned(),
            span,
            lines,
        });

        #[cfg(not(procmacro2_semver_exempt))]
        let _ = name;

        span
    }

    fn fileinfo(&self, span: Span) -> &FileInfo {
        for file in &self.files {
            if file.span_within(span) {
                return file;
            }
        }
        panic!("Invalid span with no related FileInfo!");
    }
}

#[derive(Clone, Copy, PartialEq, Eq)]
pub(crate) struct Span {
    #[cfg(span_locations)]
    pub(crate) lo: u32,
    #[cfg(span_locations)]
    pub(crate) hi: u32,
}

impl Span {
    #[cfg(not(span_locations))]
    pub fn call_site() -> Self {
        Span {}
    }

    #[cfg(span_locations)]
    pub fn call_site() -> Self {
        Span { lo: 0, hi: 0 }
    }

    #[cfg(not(no_hygiene))]
    pub fn mixed_site() -> Self {
        Span::call_site()
    }

    #[cfg(procmacro2_semver_exempt)]
    pub fn def_site() -> Self {
        Span::call_site()
    }

    pub fn resolved_at(&self, _other: Span) -> Span {
        // Stable spans consist only of line/column information, so
        // `resolved_at` and `located_at` only select which span the
        // caller wants line/column information from.
        *self
    }

    pub fn located_at(&self, other: Span) -> Span {
        other
    }

    #[cfg(procmacro2_semver_exempt)]
    pub fn source_file(&self) -> SourceFile {
        SOURCE_MAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(*self);
            SourceFile {
                path: Path::new(&fi.name).to_owned(),
            }
        })
    }

    #[cfg(span_locations)]
    pub fn start(&self) -> LineColumn {
        SOURCE_MAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(*self);
            fi.offset_line_column(self.lo as usize)
        })
    }

    #[cfg(span_locations)]
    pub fn end(&self) -> LineColumn {
        SOURCE_MAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(*self);
            fi.offset_line_column(self.hi as usize)
        })
    }

    #[cfg(not(span_locations))]
    pub fn join(&self, _other: Span) -> Option<Span> {
        Some(Span {})
    }

    #[cfg(span_locations)]
    pub fn join(&self, other: Span) -> Option<Span> {
        SOURCE_MAP.with(|cm| {
            let cm = cm.borrow();
            // If `other` is not within the same FileInfo as us, return None.
            if !cm.fileinfo(*self).span_within(other) {
                return None;
            }
            Some(Span {
                lo: cmp::min(self.lo, other.lo),
                hi: cmp::max(self.hi, other.hi),
            })
        })
    }

    #[cfg(not(span_locations))]
    fn first_byte(self) -> Self {
        self
    }

    #[cfg(span_locations)]
    fn first_byte(self) -> Self {
        Span {
            lo: self.lo,
            hi: cmp::min(self.lo.saturating_add(1), self.hi),
        }
    }

    #[cfg(not(span_locations))]
    fn last_byte(self) -> Self {
        self
    }

    #[cfg(span_locations)]
    fn last_byte(self) -> Self {
        Span {
            lo: cmp::max(self.hi.saturating_sub(1), self.lo),
            hi: self.hi,
        }
    }
}

impl Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        #[cfg(span_locations)]
        return write!(f, "bytes({}..{})", self.lo, self.hi);

        #[cfg(not(span_locations))]
        write!(f, "Span")
    }
}

pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
    #[cfg(span_locations)]
    {
        if span.lo == 0 && span.hi == 0 {
            return;
        }
    }

    if cfg!(span_locations) {
        debug.field("span", &span);
    }
}

#[derive(Clone)]
pub(crate) struct Group {
    delimiter: Delimiter,
    stream: TokenStream,
    span: Span,
}

impl Group {
    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
        Group {
            delimiter,
            stream,
            span: Span::call_site(),
        }
    }

    pub fn delimiter(&self) -> Delimiter {
        self.delimiter
    }

    pub fn stream(&self) -> TokenStream {
        self.stream.clone()
    }

    pub fn span(&self) -> Span {
        self.span
    }

    pub fn span_open(&self) -> Span {
        self.span.first_byte()
    }

    pub fn span_close(&self) -> Span {
        self.span.last_byte()
    }

    pub fn set_span(&mut self, span: Span) {
        self.span = span;
    }
}

impl Display for Group {
    // We attempt to match libproc_macro's formatting.
    // Empty parens: ()
    // Nonempty parens: (...)
    // Empty brackets: []
    // Nonempty brackets: [...]
    // Empty braces: { }
    // Nonempty braces: { ... }
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let (open, close) = match self.delimiter {
            Delimiter::Parenthesis => ("(", ")"),
            Delimiter::Brace => ("{ ", "}"),
            Delimiter::Bracket => ("[", "]"),
            Delimiter::None => ("", ""),
        };

        f.write_str(open)?;
        Display::fmt(&self.stream, f)?;
        if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
            f.write_str(" ")?;
        }
        f.write_str(close)?;

        Ok(())
    }
}

impl Debug for Group {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut debug = fmt.debug_struct("Group");
        debug.field("delimiter", &self.delimiter);
        debug.field("stream", &self.stream);
        debug_span_field_if_nontrivial(&mut debug, self.span);
        debug.finish()
    }
}

#[derive(Clone)]
pub(crate) struct Ident {
    sym: String,
    span: Span,
    raw: bool,
}

impl Ident {
    fn _new(string: &str, raw: bool, span: Span) -> Self {
        validate_ident(string);

        Ident {
            sym: string.to_owned(),
            span,
            raw,
        }
    }

    pub fn new(string: &str, span: Span) -> Self {
        Ident::_new(string, false, span)
    }

    pub fn new_raw(string: &str, span: Span) -> Self {
        Ident::_new(string, true, span)
    }

    pub fn span(&self) -> Span {
        self.span
    }

    pub fn set_span(&mut self, span: Span) {
        self.span = span;
    }
}

pub(crate) fn is_ident_start(c: char) -> bool {
    c == '_' || unicode_ident::is_xid_start(c)
}

pub(crate) fn is_ident_continue(c: char) -> bool {
    unicode_ident::is_xid_continue(c)
}

fn validate_ident(string: &str) {
    let validate = string;
    if validate.is_empty() {
        panic!("Ident is not allowed to be empty; use Option<Ident>");
    }

    if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
        panic!("Ident cannot be a number; use Literal instead");
    }

    fn ident_ok(string: &str) -> bool {
        let mut chars = string.chars();
        let first = chars.next().unwrap();
        if !is_ident_start(first) {
            return false;
        }
        for ch in chars {
            if !is_ident_continue(ch) {
                return false;
            }
        }
        true
    }

    if !ident_ok(validate) {
        panic!("{:?} is not a valid Ident", string);
    }
}

impl PartialEq for Ident {
    fn eq(&self, other: &Ident) -> bool {
        self.sym == other.sym && self.raw == other.raw
    }
}

impl<T> PartialEq<T> for Ident
where
    T: ?Sized + AsRef<str>,
{
    fn eq(&self, other: &T) -> bool {
        let other = other.as_ref();
        if self.raw {
            other.starts_with("r#") && self.sym == other[2..]
        } else {
            self.sym == other
        }
    }
}

impl Display for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.raw {
            f.write_str("r#")?;
        }
        Display::fmt(&self.sym, f)
    }
}

impl Debug for Ident {
    // Ident(proc_macro), Ident(r#union)
    #[cfg(not(span_locations))]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut debug = f.debug_tuple("Ident");
        debug.field(&format_args!("{}", self));
        debug.finish()
    }

    // Ident {
    //     sym: proc_macro,
    //     span: bytes(128..138)
    // }
    #[cfg(span_locations)]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut debug = f.debug_struct("Ident");
        debug.field("sym", &format_args!("{}", self));
        debug_span_field_if_nontrivial(&mut debug, self.span);
        debug.finish()
    }
}

#[derive(Clone)]
pub(crate) struct Literal {
    repr: String,
    span: Span,
}

macro_rules! suffixed_numbers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub fn $name(n: $kind) -> Literal {
            Literal::_new(format!(concat!("{}", stringify!($kind)), n))
        }
    )*)
}

macro_rules! unsuffixed_numbers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub fn $name(n: $kind) -> Literal {
            Literal::_new(n.to_string())
        }
    )*)
}

impl Literal {
    pub(crate) fn _new(repr: String) -> Self {
        Literal {
            repr,
            span: Span::call_site(),
        }
    }

    pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self {
        Literal::_new(repr.to_owned())
    }

    suffixed_numbers! {
        u8_suffixed => u8,
        u16_suffixed => u16,
        u32_suffixed => u32,
        u64_suffixed => u64,
        u128_suffixed => u128,
        usize_suffixed => usize,
        i8_suffixed => i8,
        i16_suffixed => i16,
        i32_suffixed => i32,
        i64_suffixed => i64,
        i128_suffixed => i128,
        isize_suffixed => isize,

        f32_suffixed => f32,
        f64_suffixed => f64,
    }

    unsuffixed_numbers! {
        u8_unsuffixed => u8,
        u16_unsuffixed => u16,
        u32_unsuffixed => u32,
        u64_unsuffixed => u64,
        u128_unsuffixed => u128,
        usize_unsuffixed => usize,
        i8_unsuffixed => i8,
        i16_unsuffixed => i16,
        i32_unsuffixed => i32,
        i64_unsuffixed => i64,
        i128_unsuffixed => i128,
        isize_unsuffixed => isize,
    }

    pub fn f32_unsuffixed(f: f32) -> Literal {
        let mut s = f.to_string();
        if !s.contains('.') {
            s.push_str(".0");
        }
        Literal::_new(s)
    }

    pub fn f64_unsuffixed(f: f64) -> Literal {
        let mut s = f.to_string();
        if !s.contains('.') {
            s.push_str(".0");
        }
        Literal::_new(s)
    }

    pub fn string(t: &str) -> Literal {
        let mut repr = String::with_capacity(t.len() + 2);
        repr.push('"');
        for c in t.chars() {
            if c == '\'' {
                // escape_debug turns this into "\'" which is unnecessary.
                repr.push(c);
            } else {
                repr.extend(c.escape_debug());
            }
        }
        repr.push('"');
        Literal::_new(repr)
    }

    pub fn character(t: char) -> Literal {
        let mut repr = String::new();
        repr.push('\'');
        if t == '"' {
            // escape_debug turns this into '\"' which is unnecessary.
            repr.push(t);
        } else {
            repr.extend(t.escape_debug());
        }
        repr.push('\'');
        Literal::_new(repr)
    }

    pub fn byte_string(bytes: &[u8]) -> Literal {
        let mut escaped = "b\"".to_string();
        for b in bytes {
            #[allow(clippy::match_overlapping_arm)]
            match *b {
                b'\0' => escaped.push_str(r"\0"),
                b'\t' => escaped.push_str(r"\t"),
                b'\n' => escaped.push_str(r"\n"),
                b'\r' => escaped.push_str(r"\r"),
                b'"' => escaped.push_str("\\\""),
                b'\\' => escaped.push_str("\\\\"),
                b'\x20'..=b'\x7E' => escaped.push(*b as char),
                _ => {
                    let _ = write!(escaped, "\\x{:02X}", b);
                }
            }
        }
        escaped.push('"');
        Literal::_new(escaped)
    }

    pub fn span(&self) -> Span {
        self.span
    }

    pub fn set_span(&mut self, span: Span) {
        self.span = span;
    }

    pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
        None
    }
}

impl FromStr for Literal {
    type Err = LexError;

    fn from_str(mut repr: &str) -> Result<Self, Self::Err> {
        let negative = repr.starts_with('-');
        if negative {
            repr = &repr[1..];
            if !repr.starts_with(|ch: char| ch.is_ascii_digit()) {
                return Err(LexError::call_site());
            }
        }
        let cursor = get_cursor(repr);
        if let Ok((_rest, mut literal)) = parse::literal(cursor) {
            if literal.repr.len() == repr.len() {
                if negative {
                    literal.repr.insert(0, '-');
                }
                return Ok(literal);
            }
        }
        Err(LexError::call_site())
    }
}

impl Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.repr, f)
    }
}

impl Debug for Literal {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut debug = fmt.debug_struct("Literal");
        debug.field("lit", &format_args!("{}", self.repr));
        debug_span_field_if_nontrivial(&mut debug, self.span);
        debug.finish()
    }
}
1312  zeroidc/vendor/proc-macro2/src/lib.rs  (vendored, new file)
File diff suppressed because it is too large
18  zeroidc/vendor/proc-macro2/src/marker.rs  (vendored, new file)
@@ -0,0 +1,18 @@
use std::marker::PhantomData;
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::rc::Rc;

// Zero sized marker with the correct set of autotrait impls we want all proc
// macro types to have.
pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;

pub(crate) use self::value::*;

mod value {
    pub(crate) use std::marker::PhantomData as Marker;
}

pub(crate) struct ProcMacroAutoTraits(Rc<()>);

impl UnwindSafe for ProcMacroAutoTraits {}
impl RefUnwindSafe for ProcMacroAutoTraits {}
865  zeroidc/vendor/proc-macro2/src/parse.rs  (vendored, new file)
@@ -0,0 +1,865 @@
use crate::fallback::{
    is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
};
use crate::{Delimiter, Punct, Spacing, TokenTree};
use std::char;
use std::str::{Bytes, CharIndices, Chars};

#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct Cursor<'a> {
    pub rest: &'a str,
    #[cfg(span_locations)]
    pub off: u32,
}

impl<'a> Cursor<'a> {
    fn advance(&self, bytes: usize) -> Cursor<'a> {
        let (_front, rest) = self.rest.split_at(bytes);
        Cursor {
            rest,
            #[cfg(span_locations)]
            off: self.off + _front.chars().count() as u32,
        }
    }

    fn starts_with(&self, s: &str) -> bool {
        self.rest.starts_with(s)
    }

    fn is_empty(&self) -> bool {
        self.rest.is_empty()
    }

    fn len(&self) -> usize {
        self.rest.len()
    }

    fn as_bytes(&self) -> &'a [u8] {
        self.rest.as_bytes()
    }

    fn bytes(&self) -> Bytes<'a> {
        self.rest.bytes()
    }

    fn chars(&self) -> Chars<'a> {
        self.rest.chars()
    }

    fn char_indices(&self) -> CharIndices<'a> {
        self.rest.char_indices()
    }

    fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
        if self.starts_with(tag) {
            Ok(self.advance(tag.len()))
        } else {
            Err(Reject)
        }
    }
}

pub(crate) struct Reject;
type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;

fn skip_whitespace(input: Cursor) -> Cursor {
    let mut s = input;

    while !s.is_empty() {
        let byte = s.as_bytes()[0];
        if byte == b'/' {
            if s.starts_with("//")
                && (!s.starts_with("///") || s.starts_with("////"))
                && !s.starts_with("//!")
            {
                let (cursor, _) = take_until_newline_or_eof(s);
                s = cursor;
                continue;
            } else if s.starts_with("/**/") {
                s = s.advance(4);
                continue;
            } else if s.starts_with("/*")
                && (!s.starts_with("/**") || s.starts_with("/***"))
                && !s.starts_with("/*!")
            {
                match block_comment(s) {
                    Ok((rest, _)) => {
                        s = rest;
                        continue;
                    }
                    Err(Reject) => return s,
                }
            }
        }
        match byte {
            b' ' | 0x09..=0x0d => {
                s = s.advance(1);
                continue;
            }
            b if b <= 0x7f => {}
            _ => {
                let ch = s.chars().next().unwrap();
                if is_whitespace(ch) {
                    s = s.advance(ch.len_utf8());
                    continue;
                }
            }
        }
        return s;
    }
    s
}

fn block_comment(input: Cursor) -> PResult<&str> {
    if !input.starts_with("/*") {
        return Err(Reject);
    }

    let mut depth = 0;
    let bytes = input.as_bytes();
    let mut i = 0;
    let upper = bytes.len() - 1;

    while i < upper {
        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
            depth += 1;
            i += 1; // eat '*'
        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
            depth -= 1;
            if depth == 0 {
                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
            }
            i += 1; // eat '/'
        }
        i += 1;
    }

    Err(Reject)
}

fn is_whitespace(ch: char) -> bool {
    // Rust treats left-to-right mark and right-to-left mark as whitespace
    ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}

fn word_break(input: Cursor) -> Result<Cursor, Reject> {
    match input.chars().next() {
        Some(ch) if is_ident_continue(ch) => Err(Reject),
        Some(_) | None => Ok(input),
    }
}

pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
    let mut trees = Vec::new();
    let mut stack = Vec::new();

    loop {
        input = skip_whitespace(input);

        if let Ok((rest, tt)) = doc_comment(input) {
            trees.extend(tt);
            input = rest;
            continue;
        }

        #[cfg(span_locations)]
        let lo = input.off;

        let first = match input.bytes().next() {
            Some(first) => first,
            None => match stack.last() {
                None => return Ok(TokenStream::from(trees)),
                #[cfg(span_locations)]
                Some((lo, _frame)) => {
                    return Err(LexError {
                        span: Span { lo: *lo, hi: *lo },
                    })
                }
                #[cfg(not(span_locations))]
                Some(_frame) => return Err(LexError { span: Span {} }),
            },
        };

        if let Some(open_delimiter) = match first {
            b'(' => Some(Delimiter::Parenthesis),
            b'[' => Some(Delimiter::Bracket),
            b'{' => Some(Delimiter::Brace),
            _ => None,
        } {
            input = input.advance(1);
            let frame = (open_delimiter, trees);
            #[cfg(span_locations)]
            let frame = (lo, frame);
            stack.push(frame);
            trees = Vec::new();
        } else if let Some(close_delimiter) = match first {
            b')' => Some(Delimiter::Parenthesis),
            b']' => Some(Delimiter::Bracket),
            b'}' => Some(Delimiter::Brace),
            _ => None,
        } {
            let frame = match stack.pop() {
                Some(frame) => frame,
                None => return Err(lex_error(input)),
            };
            #[cfg(span_locations)]
            let (lo, frame) = frame;
            let (open_delimiter, outer) = frame;
            if open_delimiter != close_delimiter {
                return Err(lex_error(input));
            }
            input = input.advance(1);
            let mut g = Group::new(open_delimiter, TokenStream::from(trees));
            g.set_span(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: input.off,
            });
            trees = outer;
            trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
        } else {
            let (rest, mut tt) = match leaf_token(input) {
                Ok((rest, tt)) => (rest, tt),
                Err(Reject) => return Err(lex_error(input)),
            };
            tt.set_span(crate::Span::_new_stable(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: rest.off,
            }));
            trees.push(tt);
            input = rest;
        }
    }
}

fn lex_error(cursor: Cursor) -> LexError {
    #[cfg(not(span_locations))]
    let _ = cursor;
    LexError {
        span: Span {
            #[cfg(span_locations)]
            lo: cursor.off,
            #[cfg(span_locations)]
            hi: cursor.off,
        },
    }
}

fn leaf_token(input: Cursor) -> PResult<TokenTree> {
    if let Ok((input, l)) = literal(input) {
        // must be parsed before ident
        Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
    } else if let Ok((input, p)) = punct(input) {
        Ok((input, TokenTree::Punct(p)))
    } else if let Ok((input, i)) = ident(input) {
        Ok((input, TokenTree::Ident(i)))
    } else {
        Err(Reject)
    }
}

fn ident(input: Cursor) -> PResult<crate::Ident> {
    if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
        .iter()
        .any(|prefix| input.starts_with(prefix))
    {
        Err(Reject)
    } else {
        ident_any(input)
    }
}

fn ident_any(input: Cursor) -> PResult<crate::Ident> {
    let raw = input.starts_with("r#");
    let rest = input.advance((raw as usize) << 1);

    let (rest, sym) = ident_not_raw(rest)?;

    if !raw {
        let ident = crate::Ident::new(sym, crate::Span::call_site());
        return Ok((rest, ident));
    }

    if sym == "_" {
        return Err(Reject);
    }

    let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
    Ok((rest, ident))
}

fn ident_not_raw(input: Cursor) -> PResult<&str> {
    let mut chars = input.char_indices();

    match chars.next() {
        Some((_, ch)) if is_ident_start(ch) => {}
        _ => return Err(Reject),
    }

    let mut end = input.len();
    for (i, ch) in chars {
        if !is_ident_continue(ch) {
            end = i;
            break;
        }
    }

    Ok((input.advance(end), &input.rest[..end]))
}

pub(crate) fn literal(input: Cursor) -> PResult<Literal> {
    let rest = literal_nocapture(input)?;
    let end = input.len() - rest.len();
    Ok((rest, Literal::_new(input.rest[..end].to_string())))
}

fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(ok) = string(input) {
        Ok(ok)
    } else if let Ok(ok) = byte_string(input) {
        Ok(ok)
    } else if let Ok(ok) = byte(input) {
        Ok(ok)
    } else if let Ok(ok) = character(input) {
        Ok(ok)
    } else if let Ok(ok) = float(input) {
        Ok(ok)
    } else if let Ok(ok) = int(input) {
        Ok(ok)
    } else {
        Err(Reject)
    }
}

fn literal_suffix(input: Cursor) -> Cursor {
    match ident_not_raw(input) {
        Ok((input, _)) => input,
        Err(Reject) => input,
    }
}

fn string(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(input) = input.parse("\"") {
        cooked_string(input)
    } else if let Ok(input) = input.parse("r") {
        raw_string(input)
    } else {
        Err(Reject)
    }
}

fn cooked_string(input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices().peekable();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    if !backslash_x_char(&mut chars) {
                        break;
                    }
                }
                Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
                | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
                Some((_, 'u')) => {
                    if !backslash_u(&mut chars) {
                        break;
                    }
                }
                Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
                    let mut last = ch;
                    loop {
                        if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
                            return Err(Reject);
                        }
                        match chars.peek() {
                            Some((_, ch)) if ch.is_whitespace() => {
                                last = *ch;
                                chars.next();
                            }
                            _ => break,
                        }
                    }
                }
                _ => break,
            },
            _ch => {}
        }
    }
    Err(Reject)
}

fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
    if let Ok(input) = input.parse("b\"") {
        cooked_byte_string(input)
    } else if let Ok(input) = input.parse("br") {
        raw_string(input)
    } else {
        Err(Reject)
    }
}

fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut bytes = input.bytes().enumerate();
    while let Some((offset, b)) = bytes.next() {
        match b {
            b'"' => {
                let input = input.advance(offset + 1);
                return Ok(literal_suffix(input));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            b'\\' => match bytes.next() {
                Some((_, b'x')) => {
                    if !backslash_x_byte(&mut bytes) {
                        break;
                    }
                }
                Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
                | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
                Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
                    let mut last = b as char;
                    let rest = input.advance(newline + 1);
                    let mut chars = rest.char_indices();
                    loop {
                        if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
                            return Err(Reject);
                        }
                        match chars.next() {
                            Some((_, ch)) if ch.is_whitespace() => last = ch,
                            Some((offset, _)) => {
                                input = rest.advance(offset);
                                bytes = input.bytes().enumerate();
                                break;
                            }
                            None => return Err(Reject),
                        }
                    }
                }
                _ => break,
            },
            b if b < 0x80 => {}
            _ => break,
        }
    }
    Err(Reject)
}

fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices();
    let mut n = 0;
    for (i, ch) in &mut chars {
        match ch {
            '"' => {
                n = i;
                break;
            }
            '#' => {}
            _ => return Err(Reject),
        }
    }
    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
                let rest = input.advance(i + 1 + n);
                return Ok(literal_suffix(rest));
            }
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            _ => {}
        }
    }
    Err(Reject)
}

fn byte(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("b'")?;
    let mut bytes = input.bytes().enumerate();
    let ok = match bytes.next().map(|(_, b)| b) {
        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
            Some(b'x') => backslash_x_byte(&mut bytes),
            Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
            | Some(b'"') => true,
            _ => false,
        },
        b => b.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (offset, _) = bytes.next().ok_or(Reject)?;
    if !input.chars().as_str().is_char_boundary(offset) {
        return Err(Reject);
    }
    let input = input.advance(offset).parse("'")?;
    Ok(literal_suffix(input))
}

fn character(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("'")?;
    let mut chars = input.char_indices();
    let ok = match chars.next().map(|(_, ch)| ch) {
        Some('\\') => match chars.next().map(|(_, ch)| ch) {
            Some('x') => backslash_x_char(&mut chars),
            Some('u') => backslash_u(&mut chars),
            Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
                true
            }
            _ => false,
        },
        ch => ch.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (idx, _) = chars.next().ok_or(Reject)?;
    let input = input.advance(idx).parse("'")?;
    Ok(literal_suffix(input))
}

macro_rules! next_ch {
    ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
        match $chars.next() {
            Some((_, ch)) => match ch {
                $pat $(| $rest)* => ch,
                _ => return false,
            },
            None => return false,
        }
    };
}

fn backslash_x_char<I>(chars: &mut I) -> bool
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '0'..='7');
    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    true
}

fn backslash_x_byte<I>(chars: &mut I) -> bool
where
    I: Iterator<Item = (usize, u8)>,
{
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    true
}

fn backslash_u<I>(chars: &mut I) -> bool
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '{');
    let mut value = 0;
    let mut len = 0;
    for (_, ch) in chars {
        let digit = match ch {
            '0'..='9' => ch as u8 - b'0',
            'a'..='f' => 10 + ch as u8 - b'a',
            'A'..='F' => 10 + ch as u8 - b'A',
            '_' if len > 0 => continue,
            '}' if len > 0 => return char::from_u32(value).is_some(),
            _ => return false,
        };
        if len == 6 {
            return false;
        }
        value *= 0x10;
        value += u32::from(digit);
        len += 1;
    }
    false
}

fn float(input: Cursor) -> Result<Cursor, Reject> {
    let mut rest = float_digits(input)?;
    if let Some(ch) = rest.chars().next() {
        if is_ident_start(ch) {
            rest = ident_not_raw(rest)?.0;
        }
    }
    word_break(rest)
}

fn float_digits(input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.chars().peekable();
    match chars.next() {
        Some(ch) if ch >= '0' && ch <= '9' => {}
        _ => return Err(Reject),
    }

    let mut len = 1;
    let mut has_dot = false;
    let mut has_exp = false;
    while let Some(&ch) = chars.peek() {
        match ch {
            '0'..='9' | '_' => {
                chars.next();
                len += 1;
            }
            '.' => {
                if has_dot {
                    break;
                }
                chars.next();
                if chars
                    .peek()
                    .map_or(false, |&ch| ch == '.' || is_ident_start(ch))
                {
                    return Err(Reject);
                }
                len += 1;
                has_dot = true;
            }
            'e' | 'E' => {
                chars.next();
                len += 1;
                has_exp = true;
                break;
            }
            _ => break,
        }
    }

    if !(has_dot || has_exp) {
        return Err(Reject);
    }

    if has_exp {
        let token_before_exp = if has_dot {
            Ok(input.advance(len - 1))
        } else {
            Err(Reject)
        };
        let mut has_sign = false;
        let mut has_exp_value = false;
        while let Some(&ch) = chars.peek() {
            match ch {
                '+' | '-' => {
                    if has_exp_value {
                        break;
                    }
                    if has_sign {
                        return token_before_exp;
                    }
                    chars.next();
                    len += 1;
                    has_sign = true;
                }
                '0'..='9' => {
                    chars.next();
                    len += 1;
                    has_exp_value = true;
                }
                '_' => {
                    chars.next();
                    len += 1;
                }
                _ => break,
            }
        }
        if !has_exp_value {
            return token_before_exp;
        }
    }

    Ok(input.advance(len))
}

fn int(input: Cursor) -> Result<Cursor, Reject> {
    let mut rest = digits(input)?;
    if let Some(ch) = rest.chars().next() {
        if is_ident_start(ch) {
            rest = ident_not_raw(rest)?.0;
        }
    }
    word_break(rest)
}

fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
    let base = if input.starts_with("0x") {
        input = input.advance(2);
        16
    } else if input.starts_with("0o") {
        input = input.advance(2);
        8
    } else if input.starts_with("0b") {
        input = input.advance(2);
        2
    } else {
        10
    };

    let mut len = 0;
    let mut empty = true;
    for b in input.bytes() {
        match b {
            b'0'..=b'9' => {
                let digit = (b - b'0') as u64;
                if digit >= base {
                    return Err(Reject);
                }
            }
            b'a'..=b'f' => {
                let digit = 10 + (b - b'a') as u64;
                if digit >= base {
                    break;
                }
            }
            b'A'..=b'F' => {
                let digit = 10 + (b - b'A') as u64;
                if digit >= base {
                    break;
                }
            }
            b'_' => {
                if empty && base == 10 {
                    return Err(Reject);
                }
                len += 1;
                continue;
            }
            _ => break,
        };
        len += 1;
        empty = false;
    }
    if empty {
        Err(Reject)
    } else {
        Ok(input.advance(len))
    }
}

fn punct(input: Cursor) -> PResult<Punct> {
    let (rest, ch) = punct_char(input)?;
    if ch == '\'' {
        if ident_any(rest)?.0.starts_with("'") {
            Err(Reject)
        } else {
            Ok((rest, Punct::new('\'', Spacing::Joint)))
        }
    } else {
        let kind = match punct_char(rest) {
            Ok(_) => Spacing::Joint,
            Err(Reject) => Spacing::Alone,
        };
        Ok((rest, Punct::new(ch, kind)))
    }
}

fn punct_char(input: Cursor) -> PResult<char> {
    if input.starts_with("//") || input.starts_with("/*") {
        // Do not accept `/` of a comment as a punct.
        return Err(Reject);
    }

    let mut chars = input.chars();
    let first = match chars.next() {
        Some(ch) => ch,
        None => {
            return Err(Reject);
        }
    };
    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
    if recognized.contains(first) {
        Ok((input.advance(first.len_utf8()), first))
    } else {
        Err(Reject)
    }
}

fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
    #[cfg(span_locations)]
    let lo = input.off;
    let (rest, (comment, inner)) = doc_comment_contents(input)?;
    let span = crate::Span::_new_stable(Span {
        #[cfg(span_locations)]
        lo,
        #[cfg(span_locations)]
        hi: rest.off,
    });

    let mut scan_for_bare_cr = comment;
    while let Some(cr) = scan_for_bare_cr.find('\r') {
        let rest = &scan_for_bare_cr[cr + 1..];
        if !rest.starts_with('\n') {
            return Err(Reject);
        }
        scan_for_bare_cr = rest;
    }

    let mut trees = Vec::new();
    trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
    if inner {
        trees.push(Punct::new('!', Spacing::Alone).into());
    }
    let mut stream = vec![
        TokenTree::Ident(crate::Ident::new("doc", span)),
        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
        TokenTree::Literal(crate::Literal::string(comment)),
    ];
    for tt in &mut stream {
        tt.set_span(span);
    }
    let group = Group::new(Delimiter::Bracket, TokenStream::from(stream));
    trees.push(crate::Group::_new_stable(group).into());
    for tt in &mut trees {
        tt.set_span(span);
    }
    Ok((rest, trees))
}

fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
    if input.starts_with("//!") {
        let input = input.advance(3);
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, true)))
    } else if input.starts_with("/*!") {
        let (input, s) = block_comment(input)?;
        Ok((input, (&s[3..s.len() - 2], true)))
    } else if input.starts_with("///") {
        let input = input.advance(3);
        if input.starts_with("/") {
            return Err(Reject);
        }
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, false)))
    } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
        let (input, s) = block_comment(input)?;
        Ok((input, (&s[3..s.len() - 2], false)))
    } else {
        Err(Reject)
    }
}

fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
    let chars = input.char_indices();

    for (i, ch) in chars {
        if ch == '\n' {
            return (input.advance(i), &input.rest[..i]);
        } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
            return (input.advance(i + 1), &input.rest[..i]);
        }
    }

    (input.advance(input.len()), input.rest)
}
974  zeroidc/vendor/proc-macro2/src/wrapper.rs  (vendored, new file)
@@ -0,0 +1,974 @@
|
||||
use crate::detection::inside_proc_macro;
|
||||
use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
|
||||
use std::fmt::{self, Debug, Display};
|
||||
use std::iter::FromIterator;
|
||||
use std::ops::RangeBounds;
|
||||
use std::panic;
|
||||
#[cfg(super_unstable)]
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) enum TokenStream {
|
||||
Compiler(DeferredTokenStream),
|
||||
Fallback(fallback::TokenStream),
|
||||
}
|
||||
|
||||
// Work around https://github.com/rust-lang/rust/issues/65080.
|
||||
// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
|
||||
// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
|
||||
// late as possible to batch together consecutive uses of the Extend impl.
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct DeferredTokenStream {
|
||||
stream: proc_macro::TokenStream,
|
||||
extra: Vec<proc_macro::TokenTree>,
|
||||
}
|
||||
|
||||
pub(crate) enum LexError {
|
||||
Compiler(proc_macro::LexError),
|
||||
Fallback(fallback::LexError),
|
||||
}
|
||||
|
||||
impl LexError {
|
||||
fn call_site() -> Self {
|
||||
LexError::Fallback(fallback::LexError {
|
||||
span: fallback::Span::call_site(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn mismatch() -> ! {
|
||||
panic!("stable/nightly mismatch")
|
||||
}
|
||||
|
||||
impl DeferredTokenStream {
|
||||
fn new(stream: proc_macro::TokenStream) -> Self {
|
||||
DeferredTokenStream {
|
||||
stream,
|
||||
extra: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
self.stream.is_empty() && self.extra.is_empty()
|
||||
}
|
||||
|
||||
fn evaluate_now(&mut self) {
|
||||
// If-check provides a fast short circuit for the common case of `extra`
|
||||
// being empty, which saves a round trip over the proc macro bridge.
|
||||
// Improves macro expansion time in winrt by 6% in debug mode.
|
||||
if !self.extra.is_empty() {
|
||||
self.stream.extend(self.extra.drain(..));
|
||||
}
|
||||
}
|
||||
|
||||
fn into_token_stream(mut self) -> proc_macro::TokenStream {
|
||||
self.evaluate_now();
|
||||
self.stream
|
||||
}
|
||||
}

impl TokenStream {
    pub fn new() -> Self {
        if inside_proc_macro() {
            TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
        } else {
            TokenStream::Fallback(fallback::TokenStream::new())
        }
    }

    pub fn is_empty(&self) -> bool {
        match self {
            TokenStream::Compiler(tts) => tts.is_empty(),
            TokenStream::Fallback(tts) => tts.is_empty(),
        }
    }

    fn unwrap_nightly(self) -> proc_macro::TokenStream {
        match self {
            TokenStream::Compiler(s) => s.into_token_stream(),
            TokenStream::Fallback(_) => mismatch(),
        }
    }

    fn unwrap_stable(self) -> fallback::TokenStream {
        match self {
            TokenStream::Compiler(_) => mismatch(),
            TokenStream::Fallback(s) => s,
        }
    }
}

impl FromStr for TokenStream {
    type Err = LexError;

    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        if inside_proc_macro() {
            Ok(TokenStream::Compiler(DeferredTokenStream::new(
                proc_macro_parse(src)?,
            )))
        } else {
            Ok(TokenStream::Fallback(src.parse()?))
        }
    }
}

// Work around https://github.com/rust-lang/rust/issues/58736.
fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
    let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler));
    result.unwrap_or_else(|_| Err(LexError::call_site()))
}
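
// A minimal sketch, not from the upstream file: the `FromStr` impl above never
// panics on malformed input. Inside a proc macro, a compiler panic from
// `proc_macro::TokenStream::parse` is caught by `proc_macro_parse` and turned
// into a `LexError`; outside a proc macro the fallback lexer reports the error
// directly. Either way the caller sees a `Result`. The test name is mine.
#[test]
fn from_str_reports_lex_errors() {
    // Well-formed input parses into three token trees: `a`, `+`, `b`.
    let ok: crate::TokenStream = "a + b".parse().unwrap();
    assert_eq!(ok.into_iter().count(), 3);

    // An unterminated string literal is a lex error, not a panic.
    assert!("\"unterminated".parse::<crate::TokenStream>().is_err());
}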

impl Display for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
            TokenStream::Fallback(tts) => Display::fmt(tts, f),
        }
    }
}

impl From<proc_macro::TokenStream> for TokenStream {
    fn from(inner: proc_macro::TokenStream) -> TokenStream {
        TokenStream::Compiler(DeferredTokenStream::new(inner))
    }
}

impl From<TokenStream> for proc_macro::TokenStream {
    fn from(inner: TokenStream) -> proc_macro::TokenStream {
        match inner {
            TokenStream::Compiler(inner) => inner.into_token_stream(),
            TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
        }
    }
}

impl From<fallback::TokenStream> for TokenStream {
    fn from(inner: fallback::TokenStream) -> TokenStream {
        TokenStream::Fallback(inner)
    }
}

// Assumes inside_proc_macro().
fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
    match token {
        TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
        TokenTree::Punct(tt) => {
            let spacing = match tt.spacing() {
                Spacing::Joint => proc_macro::Spacing::Joint,
                Spacing::Alone => proc_macro::Spacing::Alone,
            };
            let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
            punct.set_span(tt.span().inner.unwrap_nightly());
            punct.into()
        }
        TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
        TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
    }
}

impl From<TokenTree> for TokenStream {
    fn from(token: TokenTree) -> TokenStream {
        if inside_proc_macro() {
            TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
        } else {
            TokenStream::Fallback(token.into())
        }
    }
}

impl FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
        if inside_proc_macro() {
            TokenStream::Compiler(DeferredTokenStream::new(
                trees.into_iter().map(into_compiler_token).collect(),
            ))
        } else {
            TokenStream::Fallback(trees.into_iter().collect())
        }
    }
}

impl FromIterator<TokenStream> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
        let mut streams = streams.into_iter();
        match streams.next() {
            Some(TokenStream::Compiler(mut first)) => {
                first.evaluate_now();
                first.stream.extend(streams.map(|s| match s {
                    TokenStream::Compiler(s) => s.into_token_stream(),
                    TokenStream::Fallback(_) => mismatch(),
                }));
                TokenStream::Compiler(first)
            }
            Some(TokenStream::Fallback(mut first)) => {
                first.extend(streams.map(|s| match s {
                    TokenStream::Fallback(s) => s,
                    TokenStream::Compiler(_) => mismatch(),
                }));
                TokenStream::Fallback(first)
            }
            None => TokenStream::new(),
        }
    }
}
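
// A minimal sketch, not part of upstream: collecting several `TokenStream`s
// relies on the `FromIterator<TokenStream>` impl above, which takes the first
// stream as the accumulator and extends it in place rather than reparsing or
// copying token by token. The snippets being parsed are arbitrary examples.
#[test]
fn collect_token_streams() {
    let parts: Vec<crate::TokenStream> = vec![
        "struct Demo;".parse().unwrap(),
        "impl Demo {}".parse().unwrap(),
    ];
    let combined: crate::TokenStream = parts.into_iter().collect();
    assert!(!combined.is_empty());
}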

impl Extend<TokenTree> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
        match self {
            TokenStream::Compiler(tts) => {
                // Here is the reason for DeferredTokenStream.
                for token in stream {
                    tts.extra.push(into_compiler_token(token));
                }
            }
            TokenStream::Fallback(tts) => tts.extend(stream),
        }
    }
}

impl Extend<TokenStream> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
        match self {
            TokenStream::Compiler(tts) => {
                tts.evaluate_now();
                tts.stream
                    .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
            }
            TokenStream::Fallback(tts) => {
                tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
            }
        }
    }
}

impl Debug for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
            TokenStream::Fallback(tts) => Debug::fmt(tts, f),
        }
    }
}

impl LexError {
    pub(crate) fn span(&self) -> Span {
        match self {
            LexError::Compiler(_) => Span::call_site(),
            LexError::Fallback(e) => Span::Fallback(e.span()),
        }
    }
}

impl From<proc_macro::LexError> for LexError {
    fn from(e: proc_macro::LexError) -> LexError {
        LexError::Compiler(e)
    }
}

impl From<fallback::LexError> for LexError {
    fn from(e: fallback::LexError) -> LexError {
        LexError::Fallback(e)
    }
}

impl Debug for LexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            LexError::Compiler(e) => Debug::fmt(e, f),
            LexError::Fallback(e) => Debug::fmt(e, f),
        }
    }
}

impl Display for LexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            #[cfg(not(no_lexerror_display))]
            LexError::Compiler(e) => Display::fmt(e, f),
            #[cfg(no_lexerror_display)]
            LexError::Compiler(_e) => Display::fmt(
                &fallback::LexError {
                    span: fallback::Span::call_site(),
                },
                f,
            ),
            LexError::Fallback(e) => Display::fmt(e, f),
        }
    }
}

#[derive(Clone)]
pub(crate) enum TokenTreeIter {
    Compiler(proc_macro::token_stream::IntoIter),
    Fallback(fallback::TokenTreeIter),
}

impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenTreeIter;

    fn into_iter(self) -> TokenTreeIter {
        match self {
            TokenStream::Compiler(tts) => {
                TokenTreeIter::Compiler(tts.into_token_stream().into_iter())
            }
            TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
        }
    }
}

impl Iterator for TokenTreeIter {
    type Item = TokenTree;

    fn next(&mut self) -> Option<TokenTree> {
        let token = match self {
            TokenTreeIter::Compiler(iter) => iter.next()?,
            TokenTreeIter::Fallback(iter) => return iter.next(),
        };
        Some(match token {
            proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(),
            proc_macro::TokenTree::Punct(tt) => {
                let spacing = match tt.spacing() {
                    proc_macro::Spacing::Joint => Spacing::Joint,
                    proc_macro::Spacing::Alone => Spacing::Alone,
                };
                let mut o = Punct::new(tt.as_char(), spacing);
                o.set_span(crate::Span::_new(Span::Compiler(tt.span())));
                o.into()
            }
            proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(),
            proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(),
        })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match self {
            TokenTreeIter::Compiler(tts) => tts.size_hint(),
            TokenTreeIter::Fallback(tts) => tts.size_hint(),
        }
    }
}
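
// A minimal sketch, not from upstream: both branches of the iterator above
// promise to preserve punct spacing when converting token trees. In a unit
// test the fallback branch is the one that runs, but the observable contract
// (`+=` becomes a Joint `+` followed by an Alone `=`) is what the compiler
// conversion path mirrors. The test name is illustrative.
#[test]
fn iterator_preserves_punct_spacing() {
    let tokens: crate::TokenStream = "+=".parse().unwrap();
    let spacing: Vec<Spacing> = tokens
        .into_iter()
        .map(|tt| match tt {
            TokenTree::Punct(p) => p.spacing(),
            other => panic!("expected a punct, got {:?}", other),
        })
        .collect();
    assert_eq!(spacing, [Spacing::Joint, Spacing::Alone]);
}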

impl Debug for TokenTreeIter {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("TokenTreeIter").finish()
    }
}

#[derive(Clone, PartialEq, Eq)]
#[cfg(super_unstable)]
pub(crate) enum SourceFile {
    Compiler(proc_macro::SourceFile),
    Fallback(fallback::SourceFile),
}

#[cfg(super_unstable)]
impl SourceFile {
    fn nightly(sf: proc_macro::SourceFile) -> Self {
        SourceFile::Compiler(sf)
    }

    /// Get the path to this source file as a string.
    pub fn path(&self) -> PathBuf {
        match self {
            SourceFile::Compiler(a) => a.path(),
            SourceFile::Fallback(a) => a.path(),
        }
    }

    pub fn is_real(&self) -> bool {
        match self {
            SourceFile::Compiler(a) => a.is_real(),
            SourceFile::Fallback(a) => a.is_real(),
        }
    }
}

#[cfg(super_unstable)]
impl Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            SourceFile::Compiler(a) => Debug::fmt(a, f),
            SourceFile::Fallback(a) => Debug::fmt(a, f),
        }
    }
}

#[cfg(any(super_unstable, feature = "span-locations"))]
pub(crate) struct LineColumn {
    pub line: usize,
    pub column: usize,
}

#[derive(Copy, Clone)]
pub(crate) enum Span {
    Compiler(proc_macro::Span),
    Fallback(fallback::Span),
}

impl Span {
    pub fn call_site() -> Self {
        if inside_proc_macro() {
            Span::Compiler(proc_macro::Span::call_site())
        } else {
            Span::Fallback(fallback::Span::call_site())
        }
    }

    #[cfg(not(no_hygiene))]
    pub fn mixed_site() -> Self {
        if inside_proc_macro() {
            Span::Compiler(proc_macro::Span::mixed_site())
        } else {
            Span::Fallback(fallback::Span::mixed_site())
        }
    }

    #[cfg(super_unstable)]
    pub fn def_site() -> Self {
        if inside_proc_macro() {
            Span::Compiler(proc_macro::Span::def_site())
        } else {
            Span::Fallback(fallback::Span::def_site())
        }
    }

    pub fn resolved_at(&self, other: Span) -> Span {
        match (self, other) {
            #[cfg(not(no_hygiene))]
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),

            // Name resolution affects semantics, but location is only cosmetic
            #[cfg(no_hygiene)]
            (Span::Compiler(_), Span::Compiler(_)) => other,

            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
            _ => mismatch(),
        }
    }

    pub fn located_at(&self, other: Span) -> Span {
        match (self, other) {
            #[cfg(not(no_hygiene))]
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),

            // Name resolution affects semantics, but location is only cosmetic
            #[cfg(no_hygiene)]
            (Span::Compiler(_), Span::Compiler(_)) => *self,

            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
            _ => mismatch(),
        }
    }

    pub fn unwrap(self) -> proc_macro::Span {
        match self {
            Span::Compiler(s) => s,
            Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"),
        }
    }

    #[cfg(super_unstable)]
    pub fn source_file(&self) -> SourceFile {
        match self {
            Span::Compiler(s) => SourceFile::nightly(s.source_file()),
            Span::Fallback(s) => SourceFile::Fallback(s.source_file()),
        }
    }

    #[cfg(any(super_unstable, feature = "span-locations"))]
    pub fn start(&self) -> LineColumn {
        match self {
            #[cfg(proc_macro_span)]
            Span::Compiler(s) => {
                let proc_macro::LineColumn { line, column } = s.start();
                LineColumn { line, column }
            }
            #[cfg(not(proc_macro_span))]
            Span::Compiler(_) => LineColumn { line: 0, column: 0 },
            Span::Fallback(s) => {
                let fallback::LineColumn { line, column } = s.start();
                LineColumn { line, column }
            }
        }
    }

    #[cfg(any(super_unstable, feature = "span-locations"))]
    pub fn end(&self) -> LineColumn {
        match self {
            #[cfg(proc_macro_span)]
            Span::Compiler(s) => {
                let proc_macro::LineColumn { line, column } = s.end();
                LineColumn { line, column }
            }
            #[cfg(not(proc_macro_span))]
            Span::Compiler(_) => LineColumn { line: 0, column: 0 },
            Span::Fallback(s) => {
                let fallback::LineColumn { line, column } = s.end();
                LineColumn { line, column }
            }
        }
    }

    pub fn join(&self, other: Span) -> Option<Span> {
        let ret = match (self, other) {
            #[cfg(proc_macro_span)]
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
            _ => return None,
        };
        Some(ret)
    }

    #[cfg(super_unstable)]
    pub fn eq(&self, other: &Span) -> bool {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => a.eq(b),
            (Span::Fallback(a), Span::Fallback(b)) => a.eq(b),
            _ => false,
        }
    }

    fn unwrap_nightly(self) -> proc_macro::Span {
        match self {
            Span::Compiler(s) => s,
            Span::Fallback(_) => mismatch(),
        }
    }
}
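
// A minimal sketch, not from upstream: outside of a procedural macro
// invocation, `Span::call_site()` takes the fallback branch above, so turning
// it into a real `proc_macro::Span` via the public `Span::unwrap` must panic
// with the message from `Span::unwrap` in this module. This assumes the public
// `crate::Span::unwrap`, which is only compiled in the same builds as this
// wrapper module.
#[test]
#[should_panic(expected = "proc_macro::Span is only available in procedural macros")]
fn fallback_span_cannot_be_unwrapped() {
    // A unit test is not a proc macro invocation, so this span is a
    // `Span::Fallback` internally.
    let span = crate::Span::call_site();
    let _ = span.unwrap();
}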

impl From<proc_macro::Span> for crate::Span {
    fn from(proc_span: proc_macro::Span) -> crate::Span {
        crate::Span::_new(Span::Compiler(proc_span))
    }
}

impl From<fallback::Span> for Span {
    fn from(inner: fallback::Span) -> Span {
        Span::Fallback(inner)
    }
}

impl Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Span::Compiler(s) => Debug::fmt(s, f),
            Span::Fallback(s) => Debug::fmt(s, f),
        }
    }
}

pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
    match span {
        Span::Compiler(s) => {
            debug.field("span", &s);
        }
        Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
    }
}

#[derive(Clone)]
pub(crate) enum Group {
    Compiler(proc_macro::Group),
    Fallback(fallback::Group),
}

impl Group {
    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
        match stream {
            TokenStream::Compiler(tts) => {
                let delimiter = match delimiter {
                    Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
                    Delimiter::Bracket => proc_macro::Delimiter::Bracket,
                    Delimiter::Brace => proc_macro::Delimiter::Brace,
                    Delimiter::None => proc_macro::Delimiter::None,
                };
                Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream()))
            }
            TokenStream::Fallback(stream) => {
                Group::Fallback(fallback::Group::new(delimiter, stream))
            }
        }
    }

    pub fn delimiter(&self) -> Delimiter {
        match self {
            Group::Compiler(g) => match g.delimiter() {
                proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
                proc_macro::Delimiter::Bracket => Delimiter::Bracket,
                proc_macro::Delimiter::Brace => Delimiter::Brace,
                proc_macro::Delimiter::None => Delimiter::None,
            },
            Group::Fallback(g) => g.delimiter(),
        }
    }

    pub fn stream(&self) -> TokenStream {
        match self {
            Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())),
            Group::Fallback(g) => TokenStream::Fallback(g.stream()),
        }
    }

    pub fn span(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span()),
            Group::Fallback(g) => Span::Fallback(g.span()),
        }
    }

    pub fn span_open(&self) -> Span {
        match self {
            #[cfg(not(no_group_open_close))]
            Group::Compiler(g) => Span::Compiler(g.span_open()),
            #[cfg(no_group_open_close)]
            Group::Compiler(g) => Span::Compiler(g.span()),
            Group::Fallback(g) => Span::Fallback(g.span_open()),
        }
    }

    pub fn span_close(&self) -> Span {
        match self {
            #[cfg(not(no_group_open_close))]
            Group::Compiler(g) => Span::Compiler(g.span_close()),
            #[cfg(no_group_open_close)]
            Group::Compiler(g) => Span::Compiler(g.span()),
            Group::Fallback(g) => Span::Fallback(g.span_close()),
        }
    }

    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s),
            (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s),
            _ => mismatch(),
        }
    }

    fn unwrap_nightly(self) -> proc_macro::Group {
        match self {
            Group::Compiler(g) => g,
            Group::Fallback(_) => mismatch(),
        }
    }
}
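
// A minimal sketch, not from the upstream file: `Group::new` above picks the
// compiler or fallback representation based on which kind of `TokenStream` it
// is handed, and `delimiter`/`stream` recover what was put in. Outside a proc
// macro both values are fallback-backed; the parsed snippet is arbitrary.
#[test]
fn group_round_trips_delimiter_and_stream() {
    let body: crate::TokenStream = "1 + 2".parse().unwrap();
    let group = crate::Group::new(Delimiter::Brace, body);
    assert_eq!(group.delimiter(), Delimiter::Brace);
    assert_eq!(group.stream().into_iter().count(), 3);
}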

impl From<fallback::Group> for Group {
    fn from(g: fallback::Group) -> Self {
        Group::Fallback(g)
    }
}

impl Display for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Group::Compiler(group) => Display::fmt(group, formatter),
            Group::Fallback(group) => Display::fmt(group, formatter),
        }
    }
}

impl Debug for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Group::Compiler(group) => Debug::fmt(group, formatter),
            Group::Fallback(group) => Debug::fmt(group, formatter),
        }
    }
}

#[derive(Clone)]
pub(crate) enum Ident {
    Compiler(proc_macro::Ident),
    Fallback(fallback::Ident),
}

impl Ident {
    pub fn new(string: &str, span: Span) -> Self {
        match span {
            Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new(string, s)),
        }
    }

    pub fn new_raw(string: &str, span: Span) -> Self {
        match span {
            Span::Compiler(s) => {
                let p: proc_macro::TokenStream = string.parse().unwrap();
                let ident = match p.into_iter().next() {
                    Some(proc_macro::TokenTree::Ident(mut i)) => {
                        i.set_span(s);
                        i
                    }
                    _ => panic!(),
                };
                Ident::Compiler(ident)
            }
            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)),
        }
    }

    pub fn span(&self) -> Span {
        match self {
            Ident::Compiler(t) => Span::Compiler(t.span()),
            Ident::Fallback(t) => Span::Fallback(t.span()),
        }
    }

    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s),
            (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s),
            _ => mismatch(),
        }
    }

    fn unwrap_nightly(self) -> proc_macro::Ident {
        match self {
            Ident::Compiler(s) => s,
            Ident::Fallback(_) => mismatch(),
        }
    }
}

impl PartialEq for Ident {
    fn eq(&self, other: &Ident) -> bool {
        match (self, other) {
            (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(),
            (Ident::Fallback(t), Ident::Fallback(o)) => t == o,
            _ => mismatch(),
        }
    }
}

impl<T> PartialEq<T> for Ident
where
    T: ?Sized + AsRef<str>,
{
    fn eq(&self, other: &T) -> bool {
        let other = other.as_ref();
        match self {
            Ident::Compiler(t) => t.to_string() == other,
            Ident::Fallback(t) => t == other,
        }
    }
}
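
// A minimal sketch, not from upstream: the blanket `PartialEq<T: AsRef<str>>`
// impl above is what lets callers compare an identifier token directly against
// a string, which is convenient when scanning a parsed stream. The parsed
// snippet and test name are illustrative only.
#[test]
fn ident_compares_against_strings() {
    let mut tokens = "fn main() {}".parse::<crate::TokenStream>().unwrap().into_iter();
    match tokens.next() {
        Some(TokenTree::Ident(ident)) => assert_eq!(ident, "fn"),
        other => panic!("expected an identifier, got {:?}", other),
    }
}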

impl Display for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Ident::Compiler(t) => Display::fmt(t, f),
            Ident::Fallback(t) => Display::fmt(t, f),
        }
    }
}

impl Debug for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Ident::Compiler(t) => Debug::fmt(t, f),
            Ident::Fallback(t) => Debug::fmt(t, f),
        }
    }
}

#[derive(Clone)]
pub(crate) enum Literal {
    Compiler(proc_macro::Literal),
    Fallback(fallback::Literal),
}

macro_rules! suffixed_numbers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub fn $name(n: $kind) -> Literal {
            if inside_proc_macro() {
                Literal::Compiler(proc_macro::Literal::$name(n))
            } else {
                Literal::Fallback(fallback::Literal::$name(n))
            }
        }
    )*)
}

macro_rules! unsuffixed_integers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub fn $name(n: $kind) -> Literal {
            if inside_proc_macro() {
                Literal::Compiler(proc_macro::Literal::$name(n))
            } else {
                Literal::Fallback(fallback::Literal::$name(n))
            }
        }
    )*)
}

impl Literal {
    pub unsafe fn from_str_unchecked(repr: &str) -> Self {
        if inside_proc_macro() {
            Literal::Compiler(compiler_literal_from_str(repr).expect("invalid literal"))
        } else {
            Literal::Fallback(fallback::Literal::from_str_unchecked(repr))
        }
    }

    suffixed_numbers! {
        u8_suffixed => u8,
        u16_suffixed => u16,
        u32_suffixed => u32,
        u64_suffixed => u64,
        u128_suffixed => u128,
        usize_suffixed => usize,
        i8_suffixed => i8,
        i16_suffixed => i16,
        i32_suffixed => i32,
        i64_suffixed => i64,
        i128_suffixed => i128,
        isize_suffixed => isize,

        f32_suffixed => f32,
        f64_suffixed => f64,
    }

    unsuffixed_integers! {
        u8_unsuffixed => u8,
        u16_unsuffixed => u16,
        u32_unsuffixed => u32,
        u64_unsuffixed => u64,
        u128_unsuffixed => u128,
        usize_unsuffixed => usize,
        i8_unsuffixed => i8,
        i16_unsuffixed => i16,
        i32_unsuffixed => i32,
        i64_unsuffixed => i64,
        i128_unsuffixed => i128,
        isize_unsuffixed => isize,
    }

    pub fn f32_unsuffixed(f: f32) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
        } else {
            Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
        }
    }

    pub fn f64_unsuffixed(f: f64) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
        } else {
            Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
        }
    }

    pub fn string(t: &str) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::string(t))
        } else {
            Literal::Fallback(fallback::Literal::string(t))
        }
    }

    pub fn character(t: char) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::character(t))
        } else {
            Literal::Fallback(fallback::Literal::character(t))
        }
    }

    pub fn byte_string(bytes: &[u8]) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::byte_string(bytes))
        } else {
            Literal::Fallback(fallback::Literal::byte_string(bytes))
        }
    }

    pub fn span(&self) -> Span {
        match self {
            Literal::Compiler(lit) => Span::Compiler(lit.span()),
            Literal::Fallback(lit) => Span::Fallback(lit.span()),
        }
    }

    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s),
            (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s),
            _ => mismatch(),
        }
    }

    pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
        match self {
            #[cfg(proc_macro_span)]
            Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler),
            #[cfg(not(proc_macro_span))]
            Literal::Compiler(_lit) => None,
            Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback),
        }
    }

    fn unwrap_nightly(self) -> proc_macro::Literal {
        match self {
            Literal::Compiler(s) => s,
            Literal::Fallback(_) => mismatch(),
        }
    }
}

impl From<fallback::Literal> for Literal {
    fn from(s: fallback::Literal) -> Literal {
        Literal::Fallback(s)
    }
}

impl FromStr for Literal {
    type Err = LexError;

    fn from_str(repr: &str) -> Result<Self, Self::Err> {
        if inside_proc_macro() {
            compiler_literal_from_str(repr).map(Literal::Compiler)
        } else {
            let literal = fallback::Literal::from_str(repr)?;
            Ok(Literal::Fallback(literal))
        }
    }
}
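
// A minimal sketch, not from upstream: the `FromStr` impl above accepts a
// single well-formed Rust literal (keeping its exact source representation,
// suffix included) and rejects anything else without panicking, whether the
// compiler's lexer or the fallback lexer does the work. The inputs are
// arbitrary examples.
#[test]
fn literal_from_str_accepts_single_literals_only() {
    let lit: crate::Literal = "1.5e3f64".parse().unwrap();
    assert_eq!(lit.to_string(), "1.5e3f64");

    // Two tokens are not a single literal.
    assert!("1 2".parse::<crate::Literal>().is_err());
}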

fn compiler_literal_from_str(repr: &str) -> Result<proc_macro::Literal, LexError> {
    #[cfg(not(no_literal_from_str))]
    {
        proc_macro::Literal::from_str(repr).map_err(LexError::Compiler)
    }
    #[cfg(no_literal_from_str)]
    {
        let tokens = proc_macro_parse(repr)?;
        let mut iter = tokens.into_iter();
        if let (Some(proc_macro::TokenTree::Literal(literal)), None) = (iter.next(), iter.next()) {
            if literal.to_string().len() == repr.len() {
                return Ok(literal);
            }
        }
        Err(LexError::call_site())
    }
}

impl Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => Display::fmt(t, f),
            Literal::Fallback(t) => Display::fmt(t, f),
        }
    }
}

impl Debug for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => Debug::fmt(t, f),
            Literal::Fallback(t) => Debug::fmt(t, f),
        }
    }
}