Restrict PartialOrd comparison to only tokens in the same buffer #1237

Merged: 2 commits, Oct 20, 2022

src/buffer.rs: 31 additions & 10 deletions

@@ -26,7 +26,8 @@ enum Entry {
     Ident(Ident),
     Punct(Punct),
     Literal(Literal),
-    End,
+    // End entries contain the offset (negative) to the start of the buffer.
+    End(isize),
 }

 /// A buffer that can be efficiently traversed multiple times, unlike
@@ -49,10 +50,10 @@ impl TokenBuffer {
                 TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)),
                 TokenTree::Group(group) => {
                     let group_start_index = entries.len();
-                    entries.push(Entry::End); // we replace this below
+                    entries.push(Entry::End(0)); // we replace this below
                     Self::recursive_new(entries, group.stream());
                     let group_end_index = entries.len();
-                    entries.push(Entry::End);
+                    entries.push(Entry::End(-(group_end_index as isize)));
                     let group_end_offset = group_end_index - group_start_index;
                     entries[group_start_index] = Entry::Group(group, group_end_offset);
                 }
@@ -78,7 +79,7 @@ impl TokenBuffer {
     pub fn new2(stream: TokenStream) -> Self {
         let mut entries = Vec::new();
         Self::recursive_new(&mut entries, stream);
-        entries.push(Entry::End);
+        entries.push(Entry::End(-(entries.len() as isize)));
         Self {
             entries: entries.into_boxed_slice(),
         }
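
The hunks above give every End entry an offset that, added to its own index, lands on entry 0 of the buffer, so the start of the owning buffer can be recovered from any terminating End. A standalone sketch of that invariant (simplified stand-in types, not syn's own code), mirroring recursive_new and new2 for a stream shaped like `a ( b ) c`:

// Simplified stand-in for syn's Entry, only to illustrate the offset scheme.
#[derive(Debug)]
enum Entry {
    Token(char),
    Group(usize), // offset from the Group entry to its matching End
    End(isize),   // negative offset back to the start of the buffer
}

fn main() {
    let mut entries = Vec::new();
    entries.push(Entry::Token('a'));

    // The group `( b )`, following the same steps as recursive_new above.
    let group_start_index = entries.len();
    entries.push(Entry::End(0)); // placeholder, replaced below
    entries.push(Entry::Token('b'));
    let group_end_index = entries.len();
    entries.push(Entry::End(-(group_end_index as isize)));
    entries[group_start_index] = Entry::Group(group_end_index - group_start_index);

    entries.push(Entry::Token('c'));
    entries.push(Entry::End(-(entries.len() as isize))); // outermost End, as in new2

    // Every End entry rewinds to index 0, the start of this buffer.
    for (index, entry) in entries.iter().enumerate() {
        if let Entry::End(offset) = entry {
            assert_eq!(index as isize + *offset, 0);
        }
    }
    println!("{:?}", entries);
}
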
@@ -128,7 +129,7 @@ impl<'a> Cursor<'a> {
         // object in global storage.
         struct UnsafeSyncEntry(Entry);
         unsafe impl Sync for UnsafeSyncEntry {}
-        static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End);
+        static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0));

         Cursor {
             ptr: &EMPTY_ENTRY.0,
@@ -145,7 +146,7 @@ impl<'a> Cursor<'a> {
         // past it, unless `ptr == scope`, which means that we're at the edge of
         // our cursor's scope. We should only have `ptr != scope` at the exit
         // from None-delimited groups entered with `ignore_none`.
-        while let Entry::End = *ptr {
+        while let Entry::End(_) = *ptr {
             if ptr == scope {
                 break;
             }
@@ -293,7 +294,7 @@ impl<'a> Cursor<'a> {
             Entry::Literal(literal) => (literal.clone().into(), 1),
             Entry::Ident(ident) => (ident.clone().into(), 1),
             Entry::Punct(punct) => (punct.clone().into(), 1),
-            Entry::End => return None,
+            Entry::End(_) => return None,
         };

         let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) };
@@ -308,7 +309,7 @@ impl<'a> Cursor<'a> {
             Entry::Literal(literal) => literal.span(),
             Entry::Ident(ident) => ident.span(),
             Entry::Punct(punct) => punct.span(),
-            Entry::End => Span::call_site(),
+            Entry::End(_) => Span::call_site(),
         }
     }

@@ -318,7 +319,7 @@ impl<'a> Cursor<'a> {
     /// This method treats `'lifetimes` as a single token.
     pub(crate) fn skip(self) -> Option<Cursor<'a>> {
         let len = match self.entry() {
-            Entry::End => return None,
+            Entry::End(_) => return None,

             // Treat lifetimes as a single tt for the purposes of 'skip'.
             Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
@@ -354,14 +355,34 @@ impl<'a> PartialEq for Cursor<'a> {

 impl<'a> PartialOrd for Cursor<'a> {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        self.ptr.partial_cmp(&other.ptr)
+        if same_buffer(*self, *other) {
+            Some(self.ptr.cmp(&other.ptr))
+        } else {
+            None
+        }
     }
 }

 pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
     a.scope == b.scope
 }

+pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool {
+    unsafe {
+        match (&*a.scope, &*b.scope) {
+            (Entry::End(a_offset), Entry::End(b_offset)) => {
+                a.scope.offset(*a_offset) == b.scope.offset(*b_offset)
+            }
+            _ => unreachable!(),
+        }
+    }
+}
+
+#[cfg(any(feature = "full", feature = "derive"))]
+pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering {
+    a.ptr.cmp(&b.ptr)
+}
+
 pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
     match cursor.entry() {
         Entry::Group(group, _) => group.span_open(),
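
A rough usage sketch of the effect of the PartialOrd change (written against syn's public syn::buffer API with the default "parsing" feature; not taken from this PR): cursors into the same buffer still order by position, while cursors into different buffers now come back as incomparable instead of being ordered by raw pointer value.

use proc_macro2::TokenStream;
use std::cmp::Ordering;
use syn::buffer::TokenBuffer;

fn main() {
    let first = TokenBuffer::new2("a b c".parse::<TokenStream>().unwrap());
    let second = TokenBuffer::new2("x y".parse::<TokenStream>().unwrap());

    // Same buffer: begin() precedes the cursor just past the first token.
    let begin = first.begin();
    let (_, after_first) = begin.token_tree().unwrap();
    assert_eq!(begin.partial_cmp(&after_first), Some(Ordering::Less));

    // Different buffers: no ordering at all, so both `<` and `>` are false.
    assert_eq!(first.begin().partial_cmp(&second.begin()), None);
    assert!(!(first.begin() < second.begin()));
    assert!(!(first.begin() > second.begin()));
}
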
src/lib.rs: 1 addition & 0 deletions

@@ -257,6 +257,7 @@
     clippy::bool_to_int_with_if,
     clippy::cast_lossless,
     clippy::cast_possible_truncation,
+    clippy::cast_possible_wrap,
     clippy::cast_ptr_alignment,
     clippy::default_trait_access,
     clippy::doc_markdown,

src/verbatim.rs: 4 additions & 1 deletion

@@ -1,15 +1,18 @@
 use crate::parse::{ParseBuffer, ParseStream};
 use proc_macro2::{Delimiter, TokenStream};
+use std::cmp::Ordering;
 use std::iter;

 pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
     let end = end.cursor();
     let mut cursor = begin.cursor();
+    assert!(crate::buffer::same_buffer(end, cursor));
+
     let mut tokens = TokenStream::new();
     while cursor != end {
         let (tt, next) = cursor.token_tree().unwrap();

-        if end < next {
+        if crate::buffer::cmp_assuming_same_buffer(end, next) == Ordering::Less {
             // A syntax node can cross the boundary of a None-delimited group
             // due to such groups being transparent to the parser in most cases.
             // Any time this occurs the group is known to be semantically
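
A side note on why the call site in `between` changes rather than keeping `end < next` (illustrated below with f64, whose PartialOrd is also partial; this is general Rust behavior, not code from the PR): `<` quietly evaluates to false for an incomparable pair, so a cursor from the wrong buffer would have masked the bug. Asserting same_buffer once up front and then using the total cmp_assuming_same_buffer keeps the original loop semantics while turning a cross-buffer mix-up into a loud panic.

use std::cmp::Ordering;

fn main() {
    // `a < b` is shorthand for `a.partial_cmp(&b) == Some(Ordering::Less)`.
    let (a, b) = (f64::NAN, 1.0_f64);
    assert_eq!(a.partial_cmp(&b), None);                 // incomparable pair
    assert_ne!(a.partial_cmp(&b), Some(Ordering::Less)); // so `<` has nothing to report
    assert!(!(a < b));                                   // it just evaluates to false
    assert!(!(a > b));                                   // and so does `>`
}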