Merge patch series "`syn` support"
This patch series introduces support for `syn` (and its dependencies):
Syn is a parsing library for parsing a stream of Rust tokens into a
syntax tree of Rust source code.
Currently this library is geared toward use in Rust procedural
macros, but contains some APIs that may be useful more generally.
It is the most downloaded Rust crate (according to crates.io), and it
is also used by the Rust compiler itself. Having such support allows us
to greatly simplify writing complex macros such as `pin-init`. We will
use it in the `macros` crate too.
Benno has already prepared the `pin-init` version based on this, and on
top of that, we will be able to simplify the `macros` crate too. I think
Jesung is working on updating the upcoming `TryFrom` and `Into` derive
macros to use `syn` too.
The series starts with a few preparation commits (two fixes were already
merged in mainline that were discovered by this series), then each crate
is added. Finally, support for using the new crates from our `macros`
crate is introduced.
This has been a long time coming, e.g. even before Rust for Linux was
merged into the Linux kernel, Gary and Benno have wanted to use `syn`.
The first iterations of this, from 2022 and 2023 (with `serde` too,
another popular crate), are at:
https://github.com/Rust-for-Linux/linux/pull/910
https://github.com/Rust-for-Linux/linux/pull/1007
After those, we considered picking these from the distributions where
possible. However, after discussing it, it is not really worth the
complexity: vendoring makes things less complex and is less fragile.
In particular, we avoid having to support and test several versions,
we avoid having to introduce Cargo just to properly fetch the right
versions from the registry, we can easily customize the crates if needed
(e.g. dropping the `unicode-ident` dependency as is done in this
series) and we simplify the configuration of the build for users for
whom the "default" paths/registries would not have worked.
Moreover, nowadays, the ~57k lines introduced are not that much compared
to years ago (back then, they would have dwarfed the actual Rust kernel
code). In addition, back then it wasn't clear the Rust experiment would
be a success, so it would have been a bit pointless/risky to add many
lines for nothing. Our macro needs were also smaller in the early days.
So, finally, in Kangrejos 2025 we discussed going with the original,
simpler approach. Thus, here is the result.
There should not be many updates needed for these, and even if there
are, they should not be too big, e.g. +7k -3k lines across the 3 crates
in the last year.
Note that `syn` does not have all the features enabled, since we do not
need them so far, but they can easily be enabled by just adding them to the
list.
Link: https://patch.msgid.link/20251124151837.2184382-1-ojeda@kernel.org
Signed-off-by: Miguel Ojeda <ojeda@kernel.org>
pull/1354/merge
commit
54e3eae855
|
|
@ -41,6 +41,7 @@
|
|||
*.o.*
|
||||
*.patch
|
||||
*.pyc
|
||||
*.rlib
|
||||
*.rmeta
|
||||
*.rpm
|
||||
*.rsi
|
||||
|
|
|
|||
7
Makefile
7
Makefile
|
|
@ -1826,10 +1826,17 @@ rusttest: prepare
|
|||
$(Q)$(MAKE) $(build)=rust $@
|
||||
|
||||
# Formatting targets
|
||||
#
|
||||
# Generated files as well as vendored crates are skipped.
|
||||
PHONY += rustfmt rustfmtcheck
|
||||
|
||||
rustfmt:
|
||||
$(Q)find $(srctree) $(RCS_FIND_IGNORE) \
|
||||
\( \
|
||||
-path $(srctree)/rust/proc-macro2 \
|
||||
-o -path $(srctree)/rust/quote \
|
||||
-o -path $(srctree)/rust/syn \
|
||||
\) -prune -o \
|
||||
-type f -a -name '*.rs' -a ! -name '*generated*' -print \
|
||||
| xargs $(RUSTFMT) $(rustfmt_flags)
|
||||
|
||||
|
|
|
|||
147
rust/Makefile
147
rust/Makefile
|
|
@ -27,6 +27,8 @@ endif
|
|||
|
||||
obj-$(CONFIG_RUST) += exports.o
|
||||
|
||||
always-$(CONFIG_RUST) += libproc_macro2.rlib libquote.rlib libsyn.rlib
|
||||
|
||||
always-$(CONFIG_RUST_KERNEL_DOCTESTS) += doctests_kernel_generated.rs
|
||||
always-$(CONFIG_RUST_KERNEL_DOCTESTS) += doctests_kernel_generated_kunit.c
|
||||
|
||||
|
|
@ -60,11 +62,61 @@ rustdoc_test_quiet=--test-args -q
|
|||
rustdoc_test_kernel_quiet=>/dev/null
|
||||
endif
|
||||
|
||||
core-cfgs = \
|
||||
--cfg no_fp_fmt_parse
|
||||
cfgs-to-flags = $(patsubst %,--cfg='%',$1)
|
||||
|
||||
core-cfgs := \
|
||||
no_fp_fmt_parse
|
||||
|
||||
core-edition := $(if $(call rustc-min-version,108700),2024,2021)
|
||||
|
||||
core-skip_flags := \
|
||||
--edition=2021 \
|
||||
-Wunreachable_pub \
|
||||
-Wrustdoc::unescaped_backticks
|
||||
|
||||
core-flags := \
|
||||
--edition=$(core-edition) \
|
||||
$(call cfgs-to-flags,$(core-cfgs))
|
||||
|
||||
proc_macro2-cfgs := \
|
||||
feature="proc-macro" \
|
||||
wrap_proc_macro \
|
||||
$(if $(call rustc-min-version,108800),proc_macro_span_file proc_macro_span_location)
|
||||
|
||||
# Stable since Rust 1.79.0: `feature(proc_macro_byte_character,proc_macro_c_str_literals)`.
|
||||
proc_macro2-flags := \
|
||||
--cap-lints=allow \
|
||||
-Zcrate-attr='feature(proc_macro_byte_character,proc_macro_c_str_literals)' \
|
||||
$(call cfgs-to-flags,$(proc_macro2-cfgs))
|
||||
|
||||
quote-cfgs := \
|
||||
feature="proc-macro"
|
||||
|
||||
quote-skip_flags := \
|
||||
--edition=2021
|
||||
|
||||
quote-flags := \
|
||||
--edition=2018 \
|
||||
--cap-lints=allow \
|
||||
--extern proc_macro2 \
|
||||
$(call cfgs-to-flags,$(quote-cfgs))
|
||||
|
||||
# `extra-traits`, `fold` and `visit` may be enabled if needed.
|
||||
syn-cfgs := \
|
||||
feature="clone-impls" \
|
||||
feature="derive" \
|
||||
feature="full" \
|
||||
feature="parsing" \
|
||||
feature="printing" \
|
||||
feature="proc-macro" \
|
||||
feature="visit-mut"
|
||||
|
||||
syn-flags := \
|
||||
--cap-lints=allow \
|
||||
--extern proc_macro2 \
|
||||
--extern quote \
|
||||
$(call cfgs-to-flags,$(syn-cfgs))
|
||||
|
||||
# `rustdoc` did not save the target modifiers, thus workaround for
|
||||
# the time being (https://github.com/rust-lang/rust/issues/144521).
|
||||
rustdoc_modifiers_workaround := $(if $(call rustc-min-version,108800),-Cunsafe-allow-abi-mismatch=fixed-x18)
|
||||
|
|
@ -114,16 +166,33 @@ rustdoc: rustdoc-core rustdoc-macros rustdoc-compiler_builtins \
|
|||
$(Q)for f in $(rustdoc_output)/static.files/rustdoc-*.css; do \
|
||||
echo ".logo-container > img { object-fit: contain; }" >> $$f; done
|
||||
|
||||
rustdoc-proc_macro2: private rustdoc_host = yes
|
||||
rustdoc-proc_macro2: private rustc_target_flags = $(proc_macro2-flags)
|
||||
rustdoc-proc_macro2: $(src)/proc-macro2/lib.rs rustdoc-clean FORCE
|
||||
+$(call if_changed,rustdoc)
|
||||
|
||||
rustdoc-quote: private rustdoc_host = yes
|
||||
rustdoc-quote: private rustc_target_flags = $(quote-flags)
|
||||
rustdoc-quote: private skip_flags = $(quote-skip_flags)
|
||||
rustdoc-quote: $(src)/quote/lib.rs rustdoc-clean rustdoc-proc_macro2 FORCE
|
||||
+$(call if_changed,rustdoc)
|
||||
|
||||
rustdoc-syn: private rustdoc_host = yes
|
||||
rustdoc-syn: private rustc_target_flags = $(syn-flags)
|
||||
rustdoc-syn: $(src)/syn/lib.rs rustdoc-clean rustdoc-quote FORCE
|
||||
+$(call if_changed,rustdoc)
|
||||
|
||||
rustdoc-macros: private rustdoc_host = yes
|
||||
rustdoc-macros: private rustc_target_flags = --crate-type proc-macro \
|
||||
--extern proc_macro
|
||||
rustdoc-macros: $(src)/macros/lib.rs rustdoc-clean FORCE
|
||||
--extern proc_macro --extern proc_macro2 --extern quote --extern syn
|
||||
rustdoc-macros: $(src)/macros/lib.rs rustdoc-clean rustdoc-proc_macro2 \
|
||||
rustdoc-quote rustdoc-syn FORCE
|
||||
+$(call if_changed,rustdoc)
|
||||
|
||||
# Starting with Rust 1.82.0, skipping `-Wrustdoc::unescaped_backticks` should
|
||||
# not be needed -- see https://github.com/rust-lang/rust/pull/128307.
|
||||
rustdoc-core: private skip_flags = --edition=2021 -Wrustdoc::unescaped_backticks
|
||||
rustdoc-core: private rustc_target_flags = --edition=$(core-edition) $(core-cfgs)
|
||||
rustdoc-core: private skip_flags = $(core-skip_flags)
|
||||
rustdoc-core: private rustc_target_flags = $(core-flags)
|
||||
rustdoc-core: $(RUST_LIB_SRC)/core/src/lib.rs rustdoc-clean FORCE
|
||||
+$(call if_changed,rustdoc)
|
||||
|
||||
|
|
@ -161,8 +230,8 @@ rustdoc-clean: FORCE
|
|||
quiet_cmd_rustc_test_library = $(RUSTC_OR_CLIPPY_QUIET) TL $<
|
||||
cmd_rustc_test_library = \
|
||||
OBJTREE=$(abspath $(objtree)) \
|
||||
$(RUSTC_OR_CLIPPY) $(rust_common_flags) \
|
||||
@$(objtree)/include/generated/rustc_cfg $(rustc_target_flags) \
|
||||
$(RUSTC_OR_CLIPPY) $(filter-out $(skip_flags),$(rust_common_flags) $(rustc_target_flags)) \
|
||||
@$(objtree)/include/generated/rustc_cfg \
|
||||
--crate-type $(if $(rustc_test_library_proc),proc-macro,rlib) \
|
||||
--out-dir $(objtree)/$(obj)/test --cfg testlib \
|
||||
-L$(objtree)/$(obj)/test \
|
||||
|
|
@ -174,9 +243,24 @@ rusttestlib-build_error: $(src)/build_error.rs FORCE
|
|||
rusttestlib-ffi: $(src)/ffi.rs FORCE
|
||||
+$(call if_changed,rustc_test_library)
|
||||
|
||||
rusttestlib-macros: private rustc_target_flags = --extern proc_macro
|
||||
rusttestlib-proc_macro2: private rustc_target_flags = $(proc_macro2-flags)
|
||||
rusttestlib-proc_macro2: $(src)/proc-macro2/lib.rs FORCE
|
||||
+$(call if_changed,rustc_test_library)
|
||||
|
||||
rusttestlib-quote: private skip_flags = $(quote-skip_flags)
|
||||
rusttestlib-quote: private rustc_target_flags = $(quote-flags)
|
||||
rusttestlib-quote: $(src)/quote/lib.rs rusttestlib-proc_macro2 FORCE
|
||||
+$(call if_changed,rustc_test_library)
|
||||
|
||||
rusttestlib-syn: private rustc_target_flags = $(syn-flags)
|
||||
rusttestlib-syn: $(src)/syn/lib.rs rusttestlib-quote FORCE
|
||||
+$(call if_changed,rustc_test_library)
|
||||
|
||||
rusttestlib-macros: private rustc_target_flags = --extern proc_macro \
|
||||
--extern proc_macro2 --extern quote --extern syn
|
||||
rusttestlib-macros: private rustc_test_library_proc = yes
|
||||
rusttestlib-macros: $(src)/macros/lib.rs FORCE
|
||||
rusttestlib-macros: $(src)/macros/lib.rs \
|
||||
rusttestlib-proc_macro2 rusttestlib-quote rusttestlib-syn FORCE
|
||||
+$(call if_changed,rustc_test_library)
|
||||
|
||||
rusttestlib-pin_init_internal: private rustc_target_flags = --cfg kernel \
|
||||
|
|
@ -257,7 +341,8 @@ quiet_cmd_rustc_test = $(RUSTC_OR_CLIPPY_QUIET) T $<
|
|||
rusttest: rusttest-macros
|
||||
|
||||
rusttest-macros: private rustc_target_flags = --extern proc_macro \
|
||||
--extern macros --extern kernel --extern pin_init
|
||||
--extern macros --extern kernel --extern pin_init \
|
||||
--extern proc_macro2 --extern quote --extern syn
|
||||
rusttest-macros: private rustdoc_test_target_flags = --crate-type proc-macro
|
||||
rusttest-macros: $(src)/macros/lib.rs \
|
||||
rusttestlib-macros rusttestlib-kernel rusttestlib-pin_init FORCE
|
||||
|
|
@ -410,18 +495,47 @@ $(obj)/exports_bindings_generated.h: $(obj)/bindings.o FORCE
|
|||
$(obj)/exports_kernel_generated.h: $(obj)/kernel.o FORCE
|
||||
$(call if_changed,exports)
|
||||
|
||||
quiet_cmd_rustc_procmacrolibrary = $(RUSTC_OR_CLIPPY_QUIET) PL $@
|
||||
cmd_rustc_procmacrolibrary = \
|
||||
$(if $(skip_clippy),$(RUSTC),$(RUSTC_OR_CLIPPY)) \
|
||||
$(filter-out $(skip_flags),$(rust_common_flags) $(rustc_target_flags)) \
|
||||
--emit=dep-info,link --crate-type rlib -O \
|
||||
--out-dir $(objtree)/$(obj) -L$(objtree)/$(obj) \
|
||||
--crate-name $(patsubst lib%.rlib,%,$(notdir $@)) $<; \
|
||||
mv $(objtree)/$(obj)/$(patsubst lib%.rlib,%,$(notdir $@)).d $(depfile); \
|
||||
sed -i '/^\#/d' $(depfile)
|
||||
|
||||
$(obj)/libproc_macro2.rlib: private skip_clippy = 1
|
||||
$(obj)/libproc_macro2.rlib: private rustc_target_flags = $(proc_macro2-flags)
|
||||
$(obj)/libproc_macro2.rlib: $(src)/proc-macro2/lib.rs FORCE
|
||||
+$(call if_changed_dep,rustc_procmacrolibrary)
|
||||
|
||||
$(obj)/libquote.rlib: private skip_clippy = 1
|
||||
$(obj)/libquote.rlib: private skip_flags = $(quote-skip_flags)
|
||||
$(obj)/libquote.rlib: private rustc_target_flags = $(quote-flags)
|
||||
$(obj)/libquote.rlib: $(src)/quote/lib.rs $(obj)/libproc_macro2.rlib FORCE
|
||||
+$(call if_changed_dep,rustc_procmacrolibrary)
|
||||
|
||||
$(obj)/libsyn.rlib: private skip_clippy = 1
|
||||
$(obj)/libsyn.rlib: private rustc_target_flags = $(syn-flags)
|
||||
$(obj)/libsyn.rlib: $(src)/syn/lib.rs $(obj)/libquote.rlib FORCE
|
||||
+$(call if_changed_dep,rustc_procmacrolibrary)
|
||||
|
||||
quiet_cmd_rustc_procmacro = $(RUSTC_OR_CLIPPY_QUIET) P $@
|
||||
cmd_rustc_procmacro = \
|
||||
$(RUSTC_OR_CLIPPY) $(rust_common_flags) $(rustc_target_flags) \
|
||||
-Clinker-flavor=gcc -Clinker=$(HOSTCC) \
|
||||
-Clink-args='$(call escsq,$(KBUILD_PROCMACROLDFLAGS))' \
|
||||
--emit=dep-info=$(depfile) --emit=link=$@ --extern proc_macro \
|
||||
--crate-type proc-macro \
|
||||
--crate-type proc-macro -L$(objtree)/$(obj) \
|
||||
--crate-name $(patsubst lib%.$(libmacros_extension),%,$(notdir $@)) \
|
||||
@$(objtree)/include/generated/rustc_cfg $<
|
||||
|
||||
# Procedural macros can only be used with the `rustc` that compiled it.
|
||||
$(obj)/$(libmacros_name): $(src)/macros/lib.rs FORCE
|
||||
$(obj)/$(libmacros_name): private rustc_target_flags = \
|
||||
--extern proc_macro2 --extern quote --extern syn
|
||||
$(obj)/$(libmacros_name): $(src)/macros/lib.rs $(obj)/libproc_macro2.rlib \
|
||||
$(obj)/libquote.rlib $(obj)/libsyn.rlib FORCE
|
||||
+$(call if_changed_dep,rustc_procmacro)
|
||||
|
||||
$(obj)/$(libpin_init_internal_name): private rustc_target_flags = --cfg kernel
|
||||
|
|
@ -444,6 +558,9 @@ quiet_cmd_rustc_library = $(if $(skip_clippy),RUSTC,$(RUSTC_OR_CLIPPY_QUIET)) L
|
|||
rust-analyzer:
|
||||
$(Q)MAKEFLAGS= $(srctree)/scripts/generate_rust_analyzer.py \
|
||||
--cfgs='core=$(core-cfgs)' $(core-edition) \
|
||||
--cfgs='proc_macro2=$(proc_macro2-cfgs)' \
|
||||
--cfgs='quote=$(quote-cfgs)' \
|
||||
--cfgs='syn=$(syn-cfgs)' \
|
||||
$(realpath $(srctree)) $(realpath $(objtree)) \
|
||||
$(rustc_sysroot) $(RUST_LIB_SRC) $(if $(KBUILD_EXTMOD),$(srcroot)) \
|
||||
> rust-project.json
|
||||
|
|
@ -499,9 +616,9 @@ $(obj)/helpers/helpers.o: $(src)/helpers/helpers.c $(recordmcount_source) FORCE
|
|||
$(obj)/exports.o: private skip_gendwarfksyms = 1
|
||||
|
||||
$(obj)/core.o: private skip_clippy = 1
|
||||
$(obj)/core.o: private skip_flags = --edition=2021 -Wunreachable_pub
|
||||
$(obj)/core.o: private skip_flags = $(core-skip_flags)
|
||||
$(obj)/core.o: private rustc_objcopy = $(foreach sym,$(redirect-intrinsics),--redefine-sym $(sym)=__rust$(sym))
|
||||
$(obj)/core.o: private rustc_target_flags = --edition=$(core-edition) $(core-cfgs)
|
||||
$(obj)/core.o: private rustc_target_flags = $(core-flags)
|
||||
$(obj)/core.o: $(RUST_LIB_SRC)/core/src/lib.rs \
|
||||
$(wildcard $(objtree)/include/config/RUSTC_VERSION_TEXT) FORCE
|
||||
+$(call if_changed_rule,rustc_library)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,13 @@
|
|||
# `proc-macro2`
|
||||
|
||||
These source files come from the Rust `proc-macro2` crate, version
|
||||
1.0.101 (released 2025-08-16), hosted in the
|
||||
<https://github.com/dtolnay/proc-macro2> repository, licensed under
|
||||
"Apache-2.0 OR MIT" and only modified to add the SPDX license
|
||||
identifiers and to remove the `unicode-ident` dependency.
|
||||
|
||||
For copyright details, please see:
|
||||
|
||||
https://github.com/dtolnay/proc-macro2/blob/1.0.101/README.md#license
|
||||
https://github.com/dtolnay/proc-macro2/blob/1.0.101/LICENSE-APACHE
|
||||
https://github.com/dtolnay/proc-macro2/blob/1.0.101/LICENSE-MIT
|
||||
|
|
@ -0,0 +1,77 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use core::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::sync::Once;
|
||||
|
||||
static WORKS: AtomicUsize = AtomicUsize::new(0);
|
||||
static INIT: Once = Once::new();
|
||||
|
||||
pub(crate) fn inside_proc_macro() -> bool {
|
||||
match WORKS.load(Ordering::Relaxed) {
|
||||
1 => return false,
|
||||
2 => return true,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
INIT.call_once(initialize);
|
||||
inside_proc_macro()
|
||||
}
|
||||
|
||||
pub(crate) fn force_fallback() {
|
||||
WORKS.store(1, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
pub(crate) fn unforce_fallback() {
|
||||
initialize();
|
||||
}
|
||||
|
||||
#[cfg(not(no_is_available))]
|
||||
fn initialize() {
|
||||
let available = proc_macro::is_available();
|
||||
WORKS.store(available as usize + 1, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
|
||||
// then use catch_unwind to determine whether the compiler's proc_macro is
|
||||
// working. When proc-macro2 is used from outside of a procedural macro all
|
||||
// of the proc_macro crate's APIs currently panic.
|
||||
//
|
||||
// The Once is to prevent the possibility of this ordering:
|
||||
//
|
||||
// thread 1 calls take_hook, gets the user's original hook
|
||||
// thread 1 calls set_hook with the null hook
|
||||
// thread 2 calls take_hook, thinks null hook is the original hook
|
||||
// thread 2 calls set_hook with the null hook
|
||||
// thread 1 calls set_hook with the actual original hook
|
||||
// thread 2 calls set_hook with what it thinks is the original hook
|
||||
//
|
||||
// in which the user's hook has been lost.
|
||||
//
|
||||
// There is still a race condition where a panic in a different thread can
|
||||
// happen during the interval that the user's original panic hook is
|
||||
// unregistered such that their hook is incorrectly not called. This is
|
||||
// sufficiently unlikely and less bad than printing panic messages to stderr
|
||||
// on correct use of this crate. Maybe there is a libstd feature request
|
||||
// here. For now, if a user needs to guarantee that this failure mode does
|
||||
// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
|
||||
// the main thread before launching any other threads.
|
||||
#[cfg(no_is_available)]
|
||||
fn initialize() {
|
||||
use std::panic::{self, PanicInfo};
|
||||
|
||||
type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
|
||||
|
||||
let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
|
||||
let sanity_check = &*null_hook as *const PanicHook;
|
||||
let original_hook = panic::take_hook();
|
||||
panic::set_hook(null_hook);
|
||||
|
||||
let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
|
||||
WORKS.store(works as usize + 1, Ordering::Relaxed);
|
||||
|
||||
let hopefully_null_hook = panic::take_hook();
|
||||
panic::set_hook(original_hook);
|
||||
if sanity_check != &*hopefully_null_hook {
|
||||
panic!("observed race condition in proc_macro2::inside_proc_macro");
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,153 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
//! Items which do not have a correspondence to any API in the proc_macro crate,
|
||||
//! but are necessary to include in proc-macro2.
|
||||
|
||||
use crate::fallback;
|
||||
use crate::imp;
|
||||
use crate::marker::{ProcMacroAutoTraits, MARKER};
|
||||
use crate::Span;
|
||||
use core::fmt::{self, Debug};
|
||||
|
||||
/// Invalidate any `proc_macro2::Span` that exist on the current thread.
|
||||
///
|
||||
/// The implementation of `Span` uses thread-local data structures and this
|
||||
/// function clears them. Calling any method on a `Span` on the current thread
|
||||
/// created prior to the invalidation will return incorrect values or crash.
|
||||
///
|
||||
/// This function is useful for programs that process more than 2<sup>32</sup>
|
||||
/// bytes of Rust source code on the same thread. Just like rustc, proc-macro2
|
||||
/// uses 32-bit source locations, and these wrap around when the total source
|
||||
/// code processed by the same thread exceeds 2<sup>32</sup> bytes (4
|
||||
/// gigabytes). After a wraparound, `Span` methods such as `source_text()` can
|
||||
/// return wrong data.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// As of late 2023, there is 200 GB of Rust code published on crates.io.
|
||||
/// Looking at just the newest version of every crate, it is 16 GB of code. So a
|
||||
/// workload that involves parsing it all would overflow a 32-bit source
|
||||
/// location unless spans are being invalidated.
|
||||
///
|
||||
/// ```
|
||||
/// use flate2::read::GzDecoder;
|
||||
/// use std::ffi::OsStr;
|
||||
/// use std::io::{BufReader, Read};
|
||||
/// use std::str::FromStr;
|
||||
/// use tar::Archive;
|
||||
///
|
||||
/// rayon::scope(|s| {
|
||||
/// for krate in every_version_of_every_crate() {
|
||||
/// s.spawn(move |_| {
|
||||
/// proc_macro2::extra::invalidate_current_thread_spans();
|
||||
///
|
||||
/// let reader = BufReader::new(krate);
|
||||
/// let tar = GzDecoder::new(reader);
|
||||
/// let mut archive = Archive::new(tar);
|
||||
/// for entry in archive.entries().unwrap() {
|
||||
/// let mut entry = entry.unwrap();
|
||||
/// let path = entry.path().unwrap();
|
||||
/// if path.extension() != Some(OsStr::new("rs")) {
|
||||
/// continue;
|
||||
/// }
|
||||
/// let mut content = String::new();
|
||||
/// entry.read_to_string(&mut content).unwrap();
|
||||
/// match proc_macro2::TokenStream::from_str(&content) {
|
||||
/// Ok(tokens) => {/* ... */},
|
||||
/// Err(_) => continue,
|
||||
/// }
|
||||
/// }
|
||||
/// });
|
||||
/// }
|
||||
/// });
|
||||
/// #
|
||||
/// # fn every_version_of_every_crate() -> Vec<std::fs::File> {
|
||||
/// # Vec::new()
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function is not applicable to and will panic if called from a
|
||||
/// procedural macro.
|
||||
#[cfg(span_locations)]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
|
||||
pub fn invalidate_current_thread_spans() {
|
||||
crate::imp::invalidate_current_thread_spans();
|
||||
}
|
||||
|
||||
/// An object that holds a [`Group`]'s `span_open()` and `span_close()` together
|
||||
/// in a more compact representation than holding those 2 spans individually.
|
||||
///
|
||||
/// [`Group`]: crate::Group
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct DelimSpan {
|
||||
inner: DelimSpanEnum,
|
||||
_marker: ProcMacroAutoTraits,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
enum DelimSpanEnum {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
Compiler {
|
||||
join: proc_macro::Span,
|
||||
open: proc_macro::Span,
|
||||
close: proc_macro::Span,
|
||||
},
|
||||
Fallback(fallback::Span),
|
||||
}
|
||||
|
||||
impl DelimSpan {
|
||||
pub(crate) fn new(group: &imp::Group) -> Self {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
let inner = match group {
|
||||
imp::Group::Compiler(group) => DelimSpanEnum::Compiler {
|
||||
join: group.span(),
|
||||
open: group.span_open(),
|
||||
close: group.span_close(),
|
||||
},
|
||||
imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()),
|
||||
};
|
||||
|
||||
#[cfg(not(wrap_proc_macro))]
|
||||
let inner = DelimSpanEnum::Fallback(group.span());
|
||||
|
||||
DelimSpan {
|
||||
inner,
|
||||
_marker: MARKER,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a span covering the entire delimited group.
|
||||
pub fn join(&self) -> Span {
|
||||
match &self.inner {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)),
|
||||
DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a span for the opening punctuation of the group only.
|
||||
pub fn open(&self) -> Span {
|
||||
match &self.inner {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)),
|
||||
DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a span for the closing punctuation of the group only.
|
||||
pub fn close(&self) -> Span {
|
||||
match &self.inner {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)),
|
||||
DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for DelimSpan {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.join(), f)
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,31 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use core::cmp::Ordering;
|
||||
|
||||
/// A line-column pair representing the start or end of a `Span`.
|
||||
///
|
||||
/// This type is semver exempt and not exposed by default.
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct LineColumn {
|
||||
/// The 1-indexed line in the source file on which the span starts or ends
|
||||
/// (inclusive).
|
||||
pub line: usize,
|
||||
/// The 0-indexed column (in UTF-8 characters) in the source file on which
|
||||
/// the span starts or ends (inclusive).
|
||||
pub column: usize,
|
||||
}
|
||||
|
||||
impl Ord for LineColumn {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.line
|
||||
.cmp(&other.line)
|
||||
.then(self.column.cmp(&other.column))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for LineColumn {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use alloc::rc::Rc;
|
||||
use core::marker::PhantomData;
|
||||
use core::panic::{RefUnwindSafe, UnwindSafe};
|
||||
|
||||
// Zero sized marker with the correct set of autotrait impls we want all proc
|
||||
// macro types to have.
|
||||
#[derive(Copy, Clone)]
|
||||
#[cfg_attr(
|
||||
all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)),
|
||||
derive(PartialEq, Eq)
|
||||
)]
|
||||
pub(crate) struct ProcMacroAutoTraits(PhantomData<Rc<()>>);
|
||||
|
||||
pub(crate) const MARKER: ProcMacroAutoTraits = ProcMacroAutoTraits(PhantomData);
|
||||
|
||||
impl UnwindSafe for ProcMacroAutoTraits {}
|
||||
impl RefUnwindSafe for ProcMacroAutoTraits {}
|
||||
|
|
@ -0,0 +1,997 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::fallback::{
|
||||
self, is_ident_continue, is_ident_start, Group, Ident, LexError, Literal, Span, TokenStream,
|
||||
TokenStreamBuilder,
|
||||
};
|
||||
use crate::{Delimiter, Punct, Spacing, TokenTree};
|
||||
use core::char;
|
||||
use core::str::{Bytes, CharIndices, Chars};
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
pub(crate) struct Cursor<'a> {
|
||||
pub(crate) rest: &'a str,
|
||||
#[cfg(span_locations)]
|
||||
pub(crate) off: u32,
|
||||
}
|
||||
|
||||
impl<'a> Cursor<'a> {
|
||||
pub(crate) fn advance(&self, bytes: usize) -> Cursor<'a> {
|
||||
let (_front, rest) = self.rest.split_at(bytes);
|
||||
Cursor {
|
||||
rest,
|
||||
#[cfg(span_locations)]
|
||||
off: self.off + _front.chars().count() as u32,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn starts_with(&self, s: &str) -> bool {
|
||||
self.rest.starts_with(s)
|
||||
}
|
||||
|
||||
pub(crate) fn starts_with_char(&self, ch: char) -> bool {
|
||||
self.rest.starts_with(ch)
|
||||
}
|
||||
|
||||
pub(crate) fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool
|
||||
where
|
||||
Pattern: FnMut(char) -> bool,
|
||||
{
|
||||
self.rest.starts_with(f)
|
||||
}
|
||||
|
||||
pub(crate) fn is_empty(&self) -> bool {
|
||||
self.rest.is_empty()
|
||||
}
|
||||
|
||||
fn len(&self) -> usize {
|
||||
self.rest.len()
|
||||
}
|
||||
|
||||
fn as_bytes(&self) -> &'a [u8] {
|
||||
self.rest.as_bytes()
|
||||
}
|
||||
|
||||
fn bytes(&self) -> Bytes<'a> {
|
||||
self.rest.bytes()
|
||||
}
|
||||
|
||||
fn chars(&self) -> Chars<'a> {
|
||||
self.rest.chars()
|
||||
}
|
||||
|
||||
fn char_indices(&self) -> CharIndices<'a> {
|
||||
self.rest.char_indices()
|
||||
}
|
||||
|
||||
fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
|
||||
if self.starts_with(tag) {
|
||||
Ok(self.advance(tag.len()))
|
||||
} else {
|
||||
Err(Reject)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Reject;
|
||||
type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;
|
||||
|
||||
fn skip_whitespace(input: Cursor) -> Cursor {
|
||||
let mut s = input;
|
||||
|
||||
while !s.is_empty() {
|
||||
let byte = s.as_bytes()[0];
|
||||
if byte == b'/' {
|
||||
if s.starts_with("//")
|
||||
&& (!s.starts_with("///") || s.starts_with("////"))
|
||||
&& !s.starts_with("//!")
|
||||
{
|
||||
let (cursor, _) = take_until_newline_or_eof(s);
|
||||
s = cursor;
|
||||
continue;
|
||||
} else if s.starts_with("/**/") {
|
||||
s = s.advance(4);
|
||||
continue;
|
||||
} else if s.starts_with("/*")
|
||||
&& (!s.starts_with("/**") || s.starts_with("/***"))
|
||||
&& !s.starts_with("/*!")
|
||||
{
|
||||
match block_comment(s) {
|
||||
Ok((rest, _)) => {
|
||||
s = rest;
|
||||
continue;
|
||||
}
|
||||
Err(Reject) => return s,
|
||||
}
|
||||
}
|
||||
}
|
||||
match byte {
|
||||
b' ' | 0x09..=0x0d => {
|
||||
s = s.advance(1);
|
||||
continue;
|
||||
}
|
||||
b if b.is_ascii() => {}
|
||||
_ => {
|
||||
let ch = s.chars().next().unwrap();
|
||||
if is_whitespace(ch) {
|
||||
s = s.advance(ch.len_utf8());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return s;
|
||||
}
|
||||
s
|
||||
}
|
||||
|
||||
fn block_comment(input: Cursor) -> PResult<&str> {
|
||||
if !input.starts_with("/*") {
|
||||
return Err(Reject);
|
||||
}
|
||||
|
||||
let mut depth = 0usize;
|
||||
let bytes = input.as_bytes();
|
||||
let mut i = 0usize;
|
||||
let upper = bytes.len() - 1;
|
||||
|
||||
while i < upper {
|
||||
if bytes[i] == b'/' && bytes[i + 1] == b'*' {
|
||||
depth += 1;
|
||||
i += 1; // eat '*'
|
||||
} else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
|
||||
depth -= 1;
|
||||
if depth == 0 {
|
||||
return Ok((input.advance(i + 2), &input.rest[..i + 2]));
|
||||
}
|
||||
i += 1; // eat '/'
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
|
||||
Err(Reject)
|
||||
}
|
||||
|
||||
fn is_whitespace(ch: char) -> bool {
|
||||
// Rust treats left-to-right mark and right-to-left mark as whitespace
|
||||
ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
|
||||
}
|
||||
|
||||
fn word_break(input: Cursor) -> Result<Cursor, Reject> {
|
||||
match input.chars().next() {
|
||||
Some(ch) if is_ident_continue(ch) => Err(Reject),
|
||||
Some(_) | None => Ok(input),
|
||||
}
|
||||
}
|
||||
|
||||
// Rustc's representation of a macro expansion error in expression position or
|
||||
// type position.
|
||||
const ERROR: &str = "(/*ERROR*/)";
|
||||
|
||||
// Entry point of the fallback lexer: tokenizes all of `input` into a
// `TokenStream`, or reports a `LexError` at the offending position.
//
// Delimited groups are handled iteratively rather than recursively: on an
// opening delimiter the partially built stream is pushed onto `stack` and a
// fresh builder is started; on the matching closing delimiter the completed
// group is appended to the popped outer builder.
pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
    let mut trees = TokenStreamBuilder::new();
    let mut stack = Vec::new();

    loop {
        input = skip_whitespace(input);

        // Doc comments expand to `#[doc = "..."]` / `#![doc = "..."]` tokens,
        // pushed directly into `trees` by `doc_comment`.
        if let Ok((rest, ())) = doc_comment(input, &mut trees) {
            input = rest;
            continue;
        }

        #[cfg(span_locations)]
        let lo = input.off;

        let first = match input.bytes().next() {
            Some(first) => first,
            // End of input: success only if no group is still open; otherwise
            // the error span points at the unclosed opening delimiter.
            None => match stack.last() {
                None => return Ok(trees.build()),
                #[cfg(span_locations)]
                Some((lo, _frame)) => {
                    return Err(LexError {
                        span: Span { lo: *lo, hi: *lo },
                    })
                }
                #[cfg(not(span_locations))]
                Some(_frame) => return Err(LexError { span: Span {} }),
            },
        };

        if let Some(open_delimiter) = match first {
            // `(/*ERROR*/)` is lexed as a literal placeholder, not a group.
            b'(' if !input.starts_with(ERROR) => Some(Delimiter::Parenthesis),
            b'[' => Some(Delimiter::Bracket),
            b'{' => Some(Delimiter::Brace),
            _ => None,
        } {
            input = input.advance(1);
            // Suspend the current builder and start collecting the group body.
            let frame = (open_delimiter, trees);
            #[cfg(span_locations)]
            let frame = (lo, frame);
            stack.push(frame);
            trees = TokenStreamBuilder::new();
        } else if let Some(close_delimiter) = match first {
            b')' => Some(Delimiter::Parenthesis),
            b']' => Some(Delimiter::Bracket),
            b'}' => Some(Delimiter::Brace),
            _ => None,
        } {
            // A closer with no open group, or with a mismatched opener, is a
            // lex error at the current position.
            let frame = match stack.pop() {
                Some(frame) => frame,
                None => return Err(lex_error(input)),
            };
            #[cfg(span_locations)]
            let (lo, frame) = frame;
            let (open_delimiter, outer) = frame;
            if open_delimiter != close_delimiter {
                return Err(lex_error(input));
            }
            input = input.advance(1);
            // The group span covers opener through closer inclusive.
            let mut g = Group::new(open_delimiter, trees.build());
            g.set_span(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: input.off,
            });
            trees = outer;
            trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g)));
        } else {
            // Not a delimiter: must be a leaf token (literal, punct, ident).
            let (rest, mut tt) = match leaf_token(input) {
                Ok((rest, tt)) => (rest, tt),
                Err(Reject) => return Err(lex_error(input)),
            };
            tt.set_span(crate::Span::_new_fallback(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: rest.off,
            }));
            trees.push_token_from_parser(tt);
            input = rest;
        }
    }
}
|
||||
|
||||
// Builds a `LexError` whose (empty) span sits at the cursor's current offset.
// Without `span_locations` the span carries no position data.
fn lex_error(cursor: Cursor) -> LexError {
    #[cfg(not(span_locations))]
    let _ = cursor;
    LexError {
        span: Span {
            #[cfg(span_locations)]
            lo: cursor.off,
            #[cfg(span_locations)]
            hi: cursor.off,
        },
    }
}
|
||||
|
||||
// Parses one non-group token: literal, punct, or ident, tried in that order,
// with a final special case turning rustc's `(/*ERROR*/)` placeholder into a
// literal token.
fn leaf_token(input: Cursor) -> PResult<TokenTree> {
    if let Ok((input, l)) = literal(input) {
        // must be parsed before ident
        Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l))))
    } else if let Ok((input, p)) = punct(input) {
        Ok((input, TokenTree::Punct(p)))
    } else if let Ok((input, i)) = ident(input) {
        Ok((input, TokenTree::Ident(i)))
    } else if input.starts_with(ERROR) {
        let rest = input.advance(ERROR.len());
        let repr = crate::Literal::_new_fallback(Literal::_new(ERROR.to_owned()));
        Ok((rest, TokenTree::Literal(repr)))
    } else {
        Err(Reject)
    }
}
|
||||
|
||||
fn ident(input: Cursor) -> PResult<crate::Ident> {
|
||||
if [
|
||||
"r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#", "c\"", "cr\"", "cr#",
|
||||
]
|
||||
.iter()
|
||||
.any(|prefix| input.starts_with(prefix))
|
||||
{
|
||||
Err(Reject)
|
||||
} else {
|
||||
ident_any(input)
|
||||
}
|
||||
}
|
||||
|
||||
// Parses an identifier, raw (`r#ident`) or not. Raw identifiers may not be
// one of the reserved words `_`, `super`, `self`, `Self`, `crate`.
fn ident_any(input: Cursor) -> PResult<crate::Ident> {
    let raw = input.starts_with("r#");
    // Skip the two-byte `r#` prefix if present.
    let rest = input.advance((raw as usize) << 1);

    let (rest, sym) = ident_not_raw(rest)?;

    if !raw {
        let ident =
            crate::Ident::_new_fallback(Ident::new_unchecked(sym, fallback::Span::call_site()));
        return Ok((rest, ident));
    }

    match sym {
        "_" | "super" | "self" | "Self" | "crate" => return Err(Reject),
        _ => {}
    }

    let ident =
        crate::Ident::_new_fallback(Ident::new_raw_unchecked(sym, fallback::Span::call_site()));
    Ok((rest, ident))
}
|
||||
|
||||
// Parses a plain (non-raw) identifier and returns it as a borrowed slice of
// the input. The first char must satisfy `is_ident_start`; subsequent chars
// must satisfy `is_ident_continue`.
fn ident_not_raw(input: Cursor) -> PResult<&str> {
    let mut chars = input.char_indices();

    match chars.next() {
        Some((_, ch)) if is_ident_start(ch) => {}
        _ => return Err(Reject),
    }

    // Default to consuming the whole input if every char continues the ident.
    let mut end = input.len();
    for (i, ch) in chars {
        if !is_ident_continue(ch) {
            end = i;
            break;
        }
    }

    Ok((input.advance(end), &input.rest[..end]))
}
|
||||
|
||||
// Parses any literal and captures its source text verbatim as the literal's
// representation (the length consumed is recovered by comparing cursors).
pub(crate) fn literal(input: Cursor) -> PResult<Literal> {
    let rest = literal_nocapture(input)?;
    let end = input.len() - rest.len();
    Ok((rest, Literal::_new(input.rest[..end].to_string())))
}
|
||||
|
||||
fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
|
||||
if let Ok(ok) = string(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = byte_string(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = c_string(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = byte(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = character(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = float(input) {
|
||||
Ok(ok)
|
||||
} else if let Ok(ok) = int(input) {
|
||||
Ok(ok)
|
||||
} else {
|
||||
Err(Reject)
|
||||
}
|
||||
}
|
||||
|
||||
fn literal_suffix(input: Cursor) -> Cursor {
|
||||
match ident_not_raw(input) {
|
||||
Ok((input, _)) => input,
|
||||
Err(Reject) => input,
|
||||
}
|
||||
}
|
||||
|
||||
fn string(input: Cursor) -> Result<Cursor, Reject> {
|
||||
if let Ok(input) = input.parse("\"") {
|
||||
cooked_string(input)
|
||||
} else if let Ok(input) = input.parse("r") {
|
||||
raw_string(input)
|
||||
} else {
|
||||
Err(Reject)
|
||||
}
|
||||
}
|
||||
|
||||
// Parses the body of a cooked (non-raw) string literal, the opening `"`
// having already been consumed. Validates escapes, rejects a bare `\r` not
// followed by `\n`, and handles a trailing `\` that splices away the
// following newline plus leading whitespace.
fn cooked_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            // A carriage return is only valid as part of CRLF.
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    backslash_x_char(&mut chars)?;
                }
                Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {}
                Some((_, 'u')) => {
                    backslash_u(&mut chars)?;
                }
                // Line continuation: re-sync the char iterator after the
                // cursor is advanced past the spliced whitespace.
                Some((newline, ch @ ('\n' | '\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, ch as u8)?;
                    chars = input.char_indices();
                }
                _ => break,
            },
            _ch => {}
        }
    }
    Err(Reject)
}
|
||||
|
||||
// Parses the body of a raw string literal, the leading `r` having already
// been consumed. The closing quote must be followed by the same run of `#`s
// that opened the literal; bare `\r` (not CRLF) is rejected.
fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            _ => {}
        }
    }
    Err(Reject)
}
|
||||
|
||||
fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
|
||||
if let Ok(input) = input.parse("b\"") {
|
||||
cooked_byte_string(input)
|
||||
} else if let Ok(input) = input.parse("br") {
|
||||
raw_byte_string(input)
|
||||
} else {
|
||||
Err(Reject)
|
||||
}
|
||||
}
|
||||
|
||||
// Parses the body of a cooked byte string literal (`b"..."`), the opening
// `b"` having already been consumed. Contents must be ASCII apart from
// escapes; `\xNN` may be any byte value, and a trailing `\` splices away the
// following newline plus whitespace.
fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut bytes = input.bytes().enumerate();
    while let Some((offset, b)) = bytes.next() {
        match b {
            b'"' => {
                let input = input.advance(offset + 1);
                return Ok(literal_suffix(input));
            }
            // A carriage return is only valid as part of CRLF.
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            b'\\' => match bytes.next() {
                Some((_, b'x')) => {
                    backslash_x_byte(&mut bytes)?;
                }
                Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {}
                // Line continuation: re-sync the byte iterator afterwards.
                Some((newline, b @ (b'\n' | b'\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, b)?;
                    bytes = input.bytes().enumerate();
                }
                _ => break,
            },
            b if b.is_ascii() => {}
            _ => break,
        }
    }
    Err(Reject)
}
|
||||
|
||||
// Consumes the `#...#"` opener of a raw string and returns the run of `#`s
// (possibly empty) that the closing quote must be followed by.
fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> {
    for (i, byte) in input.bytes().enumerate() {
        match byte {
            b'"' => {
                // Rustc rejects raw strings with more than 255 `#`s.
                if i > 255 {
                    // https://github.com/rust-lang/rust/pull/95251
                    return Err(Reject);
                }
                return Ok((input.advance(i + 1), &input.rest[..i]));
            }
            b'#' => {}
            _ => break,
        }
    }
    Err(Reject)
}
|
||||
|
||||
// Parses the body of a raw byte string literal (`br"..."` / `br#"..."#`),
// the `br` having already been consumed. Contents must be ASCII and bare
// `\r` (not CRLF) is rejected.
fn raw_byte_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            other => {
                if !other.is_ascii() {
                    break;
                }
            }
        }
    }
    Err(Reject)
}
|
||||
|
||||
fn c_string(input: Cursor) -> Result<Cursor, Reject> {
|
||||
if let Ok(input) = input.parse("c\"") {
|
||||
cooked_c_string(input)
|
||||
} else if let Ok(input) = input.parse("cr") {
|
||||
raw_c_string(input)
|
||||
} else {
|
||||
Err(Reject)
|
||||
}
|
||||
}
|
||||
|
||||
// Parses the body of a raw C string literal (`cr"..."`), the `cr` having
// already been consumed. Interior NUL bytes are rejected (C strings are
// NUL-terminated), as is a bare `\r` not part of CRLF.
fn raw_c_string(input: Cursor) -> Result<Cursor, Reject> {
    let (input, delimiter) = delimiter_of_raw_string(input)?;
    let mut bytes = input.bytes().enumerate();
    while let Some((i, byte)) = bytes.next() {
        match byte {
            b'"' if input.rest[i + 1..].starts_with(delimiter) => {
                let rest = input.advance(i + 1 + delimiter.len());
                return Ok(literal_suffix(rest));
            }
            b'\r' => match bytes.next() {
                Some((_, b'\n')) => {}
                _ => break,
            },
            b'\0' => break,
            _ => {}
        }
    }
    Err(Reject)
}
|
||||
|
||||
// Parses the body of a cooked C string literal (`c"..."`), the `c"` having
// already been consumed. Like `cooked_string` but: `\x` escapes must be
// nonzero, `\u{...}` must not denote NUL, literal NUL bytes are rejected, and
// there is no `\0` escape.
fn cooked_c_string(mut input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.char_indices();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            // A carriage return is only valid as part of CRLF.
            '\r' => match chars.next() {
                Some((_, '\n')) => {}
                _ => break,
            },
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    backslash_x_nonzero(&mut chars)?;
                }
                Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {}
                Some((_, 'u')) => {
                    // `\u{0}` would embed a NUL — not allowed in C strings.
                    if backslash_u(&mut chars)? == '\0' {
                        break;
                    }
                }
                // Line continuation: re-sync the char iterator afterwards.
                Some((newline, ch @ ('\n' | '\r'))) => {
                    input = input.advance(newline + 1);
                    trailing_backslash(&mut input, ch as u8)?;
                    chars = input.char_indices();
                }
                _ => break,
            },
            '\0' => break,
            _ch => {}
        }
    }
    Err(Reject)
}
|
||||
|
||||
// Parses a byte literal `b'...'`. The content is either a valid escape or a
// single byte; the `is_char_boundary` check rejects multi-byte (non-ASCII)
// contents, which are not valid in a byte literal.
fn byte(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("b'")?;
    let mut bytes = input.bytes().enumerate();
    let ok = match bytes.next().map(|(_, b)| b) {
        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
            Some(b'x') => backslash_x_byte(&mut bytes).is_ok(),
            Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true,
            _ => false,
        },
        b => b.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    // The next byte must be the closing quote, on a char boundary.
    let (offset, _) = bytes.next().ok_or(Reject)?;
    if !input.chars().as_str().is_char_boundary(offset) {
        return Err(Reject);
    }
    let input = input.advance(offset).parse("'")?;
    Ok(literal_suffix(input))
}
|
||||
|
||||
// Parses a character literal `'...'`: either an escape (`\x`, `\u{...}`, or
// a simple escape) or any single char, followed by the closing quote.
// Note: a lone `'` followed by an ident is a lifetime, handled in `punct`.
fn character(input: Cursor) -> Result<Cursor, Reject> {
    let input = input.parse("'")?;
    let mut chars = input.char_indices();
    let ok = match chars.next().map(|(_, ch)| ch) {
        Some('\\') => match chars.next().map(|(_, ch)| ch) {
            Some('x') => backslash_x_char(&mut chars).is_ok(),
            Some('u') => backslash_u(&mut chars).is_ok(),
            Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true,
            _ => false,
        },
        ch => ch.is_some(),
    };
    if !ok {
        return Err(Reject);
    }
    let (idx, _) = chars.next().ok_or(Reject)?;
    let input = input.advance(idx).parse("'")?;
    Ok(literal_suffix(input))
}
|
||||
|
||||
// Pulls the next `(index, char)` item from `$chars` and returns the char if
// it matches `$pat`; otherwise returns `Err(Reject)` from the enclosing
// function (also on end of input).
macro_rules! next_ch {
    ($chars:ident @ $pat:pat) => {
        match $chars.next() {
            Some((_, ch)) => match ch {
                $pat => ch,
                _ => return Err(Reject),
            },
            None => return Err(Reject),
        }
    };
}
|
||||
|
||||
// Validates the two hex digits of a `\xNN` escape in a char/string literal.
// The first digit is limited to 0-7 because `\x` in char context must be at
// most 0x7F.
fn backslash_x_char<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '0'..='7');
    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    Ok(())
}

// Validates the two hex digits of a `\xNN` escape in a byte/byte-string
// literal; any byte value 0x00-0xFF is allowed.
fn backslash_x_byte<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, u8)>,
{
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    Ok(())
}

// Validates a `\xNN` escape in a C string literal: any two hex digits except
// `00`, since C strings may not contain NUL.
fn backslash_x_nonzero<I>(chars: &mut I) -> Result<(), Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    if first == '0' && second == '0' {
        Err(Reject)
    } else {
        Ok(())
    }
}
|
||||
|
||||
// Validates a `\u{...}` escape and returns the decoded char. Accepts 1 to 6
// hex digits with `_` separators (not leading), and rejects values that are
// not valid Unicode scalar values (via `char::from_u32`).
fn backslash_u<I>(chars: &mut I) -> Result<char, Reject>
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '{');
    let mut value = 0;
    let mut len = 0;
    for (_, ch) in chars {
        let digit = match ch {
            '0'..='9' => ch as u8 - b'0',
            'a'..='f' => 10 + ch as u8 - b'a',
            'A'..='F' => 10 + ch as u8 - b'A',
            '_' if len > 0 => continue,
            '}' if len > 0 => return char::from_u32(value).ok_or(Reject),
            _ => break,
        };
        // At most 6 hex digits.
        if len == 6 {
            break;
        }
        value *= 0x10;
        value += u32::from(digit);
        len += 1;
    }
    Err(Reject)
}
|
||||
|
||||
// After a `\` at end of line inside a (byte/C) string literal, consumes the
// newline and all following whitespace, advancing `input` to the first
// non-whitespace byte. `last` is the byte that followed the backslash; a
// `\r` must always be followed by `\n`. Errors if the literal ends inside
// the spliced whitespace.
fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> {
    let mut whitespace = input.bytes().enumerate();
    loop {
        if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') {
            return Err(Reject);
        }
        match whitespace.next() {
            Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => {
                last = b;
            }
            Some((offset, _)) => {
                *input = input.advance(offset);
                return Ok(());
            }
            None => return Err(Reject),
        }
    }
}
|
||||
|
||||
// Parses a float literal, including an optional ident-shaped suffix, and
// requires a word break afterwards.
fn float(input: Cursor) -> Result<Cursor, Reject> {
    let mut rest = float_digits(input)?;
    if let Some(ch) = rest.chars().next() {
        if is_ident_start(ch) {
            rest = ident_not_raw(rest)?.0;
        }
    }
    word_break(rest)
}

// Parses the digits of a float literal: integer part, optional `.` fraction,
// optional `e`/`E` exponent with sign and `_` separators. Must contain a dot
// or an exponent (otherwise it is an int, handled elsewhere).
fn float_digits(input: Cursor) -> Result<Cursor, Reject> {
    let mut chars = input.chars().peekable();
    match chars.next() {
        Some(ch) if '0' <= ch && ch <= '9' => {}
        _ => return Err(Reject),
    }

    let mut len = 1;
    let mut has_dot = false;
    let mut has_exp = false;
    while let Some(&ch) = chars.peek() {
        match ch {
            '0'..='9' | '_' => {
                chars.next();
                len += 1;
            }
            '.' => {
                if has_dot {
                    break;
                }
                chars.next();
                // `1..2` is a range and `1.x` is a field access / method
                // call, not a float — reject those here.
                if chars
                    .peek()
                    .map_or(false, |&ch| ch == '.' || is_ident_start(ch))
                {
                    return Err(Reject);
                }
                len += 1;
                has_dot = true;
            }
            'e' | 'E' => {
                chars.next();
                len += 1;
                has_exp = true;
                break;
            }
            _ => break,
        }
    }

    if !(has_dot || has_exp) {
        return Err(Reject);
    }

    if has_exp {
        // If the exponent turns out malformed, a literal that already has a
        // dot can backtrack to just before the `e` (e.g. `1.0e+` lexes as
        // `1.0` followed by other tokens); without a dot it is an error.
        let token_before_exp = if has_dot {
            Ok(input.advance(len - 1))
        } else {
            Err(Reject)
        };
        let mut has_sign = false;
        let mut has_exp_value = false;
        while let Some(&ch) = chars.peek() {
            match ch {
                '+' | '-' => {
                    if has_exp_value {
                        break;
                    }
                    if has_sign {
                        return token_before_exp;
                    }
                    chars.next();
                    len += 1;
                    has_sign = true;
                }
                '0'..='9' => {
                    chars.next();
                    len += 1;
                    has_exp_value = true;
                }
                '_' => {
                    chars.next();
                    len += 1;
                }
                _ => break,
            }
        }
        if !has_exp_value {
            return token_before_exp;
        }
    }

    Ok(input.advance(len))
}
|
||||
|
||||
// Parses an integer literal, including an optional ident-shaped suffix, and
// requires a word break afterwards.
fn int(input: Cursor) -> Result<Cursor, Reject> {
    let mut rest = digits(input)?;
    if let Some(ch) = rest.chars().next() {
        if is_ident_start(ch) {
            rest = ident_not_raw(rest)?.0;
        }
    }
    word_break(rest)
}

// Parses the digit portion of an integer literal: an optional `0x`/`0o`/`0b`
// base prefix, then digits valid for that base, with `_` separators.
// A decimal digit out of range for the base (e.g. `0b2`) is a hard error,
// while a hex letter out of range merely ends the digits (it may start a
// suffix). At least one digit is required.
fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
    let base = if input.starts_with("0x") {
        input = input.advance(2);
        16
    } else if input.starts_with("0o") {
        input = input.advance(2);
        8
    } else if input.starts_with("0b") {
        input = input.advance(2);
        2
    } else {
        10
    };

    let mut len = 0;
    let mut empty = true;
    for b in input.bytes() {
        match b {
            b'0'..=b'9' => {
                let digit = (b - b'0') as u64;
                if digit >= base {
                    return Err(Reject);
                }
            }
            b'a'..=b'f' => {
                let digit = 10 + (b - b'a') as u64;
                if digit >= base {
                    break;
                }
            }
            b'A'..=b'F' => {
                let digit = 10 + (b - b'A') as u64;
                if digit >= base {
                    break;
                }
            }
            b'_' => {
                // A decimal literal may not start with `_` (that would be an
                // identifier); prefixed literals like `0x_1` are fine.
                if empty && base == 10 {
                    return Err(Reject);
                }
                len += 1;
                continue;
            }
            _ => break,
        }
        len += 1;
        empty = false;
    }
    if empty {
        Err(Reject)
    } else {
        Ok(input.advance(len))
    }
}
|
||||
|
||||
// Parses a punctuation token. A `'` is only a punct when it introduces a
// lifetime (it must be followed by an ident but not by another `'`, which
// would be a char literal, nor by `#`, which would be part of a raw
// identifier form); the ident itself is left for the next token.
fn punct(input: Cursor) -> PResult<Punct> {
    let (rest, ch) = punct_char(input)?;
    if ch == '\'' {
        let (after_lifetime, _ident) = ident_any(rest)?;
        if after_lifetime.starts_with_char('\'')
            || (after_lifetime.starts_with_char('#') && !rest.starts_with("r#"))
        {
            Err(Reject)
        } else {
            // A lifetime's `'` is always joint with the following ident.
            Ok((rest, Punct::new('\'', Spacing::Joint)))
        }
    } else {
        // Spacing is Joint iff another punct char follows immediately.
        let kind = match punct_char(rest) {
            Ok(_) => Spacing::Joint,
            Err(Reject) => Spacing::Alone,
        };
        Ok((rest, Punct::new(ch, kind)))
    }
}

// Parses a single punctuation character from the recognized set, rejecting
// the `/` that begins a comment.
fn punct_char(input: Cursor) -> PResult<char> {
    if input.starts_with("//") || input.starts_with("/*") {
        // Do not accept `/` of a comment as a punct.
        return Err(Reject);
    }

    let mut chars = input.chars();
    let first = match chars.next() {
        Some(ch) => ch,
        None => {
            return Err(Reject);
        }
    };
    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
    if recognized.contains(first) {
        Ok((input.advance(first.len_utf8()), first))
    } else {
        Err(Reject)
    }
}
|
||||
|
||||
// Parses a doc comment (`///`, `//!`, `/** */`, `/*! */`) and pushes its
// expansion — `#[doc = "..."]` for outer, `#![doc = "..."]` for inner — as
// tokens into `trees`. All generated tokens share the comment's span.
fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> {
    #[cfg(span_locations)]
    let lo = input.off;
    let (rest, (comment, inner)) = doc_comment_contents(input)?;
    let fallback_span = Span {
        #[cfg(span_locations)]
        lo,
        #[cfg(span_locations)]
        hi: rest.off,
    };
    let span = crate::Span::_new_fallback(fallback_span);

    // A bare `\r` not followed by `\n` is not allowed in a doc comment.
    let mut scan_for_bare_cr = comment;
    while let Some(cr) = scan_for_bare_cr.find('\r') {
        let rest = &scan_for_bare_cr[cr + 1..];
        if !rest.starts_with('\n') {
            return Err(Reject);
        }
        scan_for_bare_cr = rest;
    }

    let mut pound = Punct::new('#', Spacing::Alone);
    pound.set_span(span);
    trees.push_token_from_parser(TokenTree::Punct(pound));

    if inner {
        let mut bang = Punct::new('!', Spacing::Alone);
        bang.set_span(span);
        trees.push_token_from_parser(TokenTree::Punct(bang));
    }

    // Build the bracketed `[doc = "..."]` group.
    let doc_ident = crate::Ident::_new_fallback(Ident::new_unchecked("doc", fallback_span));
    let mut equal = Punct::new('=', Spacing::Alone);
    equal.set_span(span);
    let mut literal = crate::Literal::_new_fallback(Literal::string(comment));
    literal.set_span(span);
    let mut bracketed = TokenStreamBuilder::with_capacity(3);
    bracketed.push_token_from_parser(TokenTree::Ident(doc_ident));
    bracketed.push_token_from_parser(TokenTree::Punct(equal));
    bracketed.push_token_from_parser(TokenTree::Literal(literal));
    let group = Group::new(Delimiter::Bracket, bracketed.build());
    let mut group = crate::Group::_new_fallback(group);
    group.set_span(span);
    trees.push_token_from_parser(TokenTree::Group(group));

    Ok((rest, ()))
}
|
||||
|
||||
// Recognizes the four doc comment forms and returns the comment text plus
// whether it is an inner (`//!` / `/*!`) doc comment. `////...` and `/***`
// are ordinary comments, not doc comments, and are rejected here.
fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
    if input.starts_with("//!") {
        let input = input.advance(3);
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, true)))
    } else if input.starts_with("/*!") {
        let (input, s) = block_comment(input)?;
        // Strip the `/*!` opener and `*/` closer.
        Ok((input, (&s[3..s.len() - 2], true)))
    } else if input.starts_with("///") {
        let input = input.advance(3);
        // `////...` is a plain comment, not a doc comment.
        if input.starts_with_char('/') {
            return Err(Reject);
        }
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, false)))
    } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
        let (input, s) = block_comment(input)?;
        // Strip the `/**` opener and `*/` closer.
        Ok((input, (&s[3..s.len() - 2], false)))
    } else {
        Err(Reject)
    }
}

// Returns the text up to (excluding) the next newline, consuming a CR that
// is part of CRLF but not including it in the returned text. With no newline,
// the rest of the input is returned.
fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
    let chars = input.char_indices();

    for (i, ch) in chars {
        if ch == '\n' {
            return (input.advance(i), &input.rest[..i]);
        } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
            return (input.advance(i + 1), &input.rest[..i]);
        }
    }

    (input.advance(input.len()), input.rest)
}
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT

// Build-probe modules: each is only compiled when the corresponding cfg was
// set by the build script after successfully compiling the probe — i.e. when
// the toolchain offers that slice of proc_macro's Span API.

#![allow(dead_code)]

#[cfg(proc_macro_span)]
pub(crate) mod proc_macro_span;

#[cfg(proc_macro_span_file)]
pub(crate) mod proc_macro_span_file;

#[cfg(proc_macro_span_location)]
pub(crate) mod proc_macro_span_location;
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT

// This code exercises the surface area that we expect of Span's unstable API.
// If the current toolchain is able to compile it, then proc-macro2 is able to
// offer these APIs too.
//
// Each wrapper below pins the exact signature we rely on; a toolchain change
// to any of them makes this probe (and thus the cfg) fail.

#![cfg_attr(procmacro2_build_probe, feature(proc_macro_span))]

extern crate proc_macro;

use core::ops::{Range, RangeBounds};
use proc_macro::{Literal, Span};
use std::path::PathBuf;

pub fn byte_range(this: &Span) -> Range<usize> {
    this.byte_range()
}

pub fn start(this: &Span) -> Span {
    this.start()
}

pub fn end(this: &Span) -> Span {
    this.end()
}

pub fn line(this: &Span) -> usize {
    this.line()
}

pub fn column(this: &Span) -> usize {
    this.column()
}

pub fn file(this: &Span) -> String {
    this.file()
}

pub fn local_file(this: &Span) -> Option<PathBuf> {
    this.local_file()
}

pub fn join(this: &Span, other: Span) -> Option<Span> {
    this.join(other)
}

pub fn subspan<R: RangeBounds<usize>>(this: &Literal, range: R) -> Option<Span> {
    this.subspan(range)
}

// Include in sccache cache key.
#[cfg(procmacro2_build_probe)]
const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP");
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT

// The subset of Span's API stabilized in Rust 1.88.
// Probe for `Span::file` / `Span::local_file` on stable toolchains.

extern crate proc_macro;

use proc_macro::Span;
use std::path::PathBuf;

pub fn file(this: &Span) -> String {
    this.file()
}

pub fn local_file(this: &Span) -> Option<PathBuf> {
    this.local_file()
}
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT

// The subset of Span's API stabilized in Rust 1.88.
// Probe for the location accessors `start`/`end`/`line`/`column`.

extern crate proc_macro;

use proc_macro::Span;

pub fn start(this: &Span) -> Span {
    this.start()
}

pub fn end(this: &Span) -> Span {
    this.end()
}

pub fn line(this: &Span) -> usize {
    this.line()
}

pub fn column(this: &Span) -> usize {
    this.column()
}
|
||||
|
|
@ -0,0 +1,148 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use alloc::rc::Rc;
|
||||
use alloc::vec;
|
||||
use core::mem;
|
||||
use core::panic::RefUnwindSafe;
|
||||
use core::slice;
|
||||
|
||||
// A reference-counted, cheaply clonable vector. Cloning an `RcVec` shares
// the underlying storage; mutation goes through `RcVecMut`, obtained either
// by copy-on-write (`make_mut`) or only when uniquely owned (`get_mut`).
pub(crate) struct RcVec<T> {
    inner: Rc<Vec<T>>,
}

// An owned, still-mutable vector that has not yet been frozen into an
// `RcVec` (see `build`).
pub(crate) struct RcVecBuilder<T> {
    inner: Vec<T>,
}

// A mutable borrow of the storage behind an `RcVec` or `RcVecBuilder`.
pub(crate) struct RcVecMut<'a, T> {
    inner: &'a mut Vec<T>,
}

// Owning iterator over a builder's elements.
#[derive(Clone)]
pub(crate) struct RcVecIntoIter<T> {
    inner: vec::IntoIter<T>,
}
|
||||
|
||||
impl<T> RcVec<T> {
    pub(crate) fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }

    pub(crate) fn len(&self) -> usize {
        self.inner.len()
    }

    pub(crate) fn iter(&self) -> slice::Iter<T> {
        self.inner.iter()
    }

    // Copy-on-write mutable access: clones the storage if it is shared.
    pub(crate) fn make_mut(&mut self) -> RcVecMut<T>
    where
        T: Clone,
    {
        RcVecMut {
            inner: Rc::make_mut(&mut self.inner),
        }
    }

    // Mutable access without cloning; `None` if the storage is shared.
    pub(crate) fn get_mut(&mut self) -> Option<RcVecMut<T>> {
        let inner = Rc::get_mut(&mut self.inner)?;
        Some(RcVecMut { inner })
    }

    // Converts back into a builder, stealing the storage when uniquely owned
    // and cloning it otherwise.
    pub(crate) fn make_owned(mut self) -> RcVecBuilder<T>
    where
        T: Clone,
    {
        let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) {
            mem::take(owned)
        } else {
            Vec::clone(&self.inner)
        };
        RcVecBuilder { inner: vec }
    }
}
|
||||
|
||||
impl<T> RcVecBuilder<T> {
    pub(crate) fn new() -> Self {
        RcVecBuilder { inner: Vec::new() }
    }

    pub(crate) fn with_capacity(cap: usize) -> Self {
        RcVecBuilder {
            inner: Vec::with_capacity(cap),
        }
    }

    pub(crate) fn push(&mut self, element: T) {
        self.inner.push(element);
    }

    pub(crate) fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
        self.inner.extend(iter);
    }

    pub(crate) fn as_mut(&mut self) -> RcVecMut<T> {
        RcVecMut {
            inner: &mut self.inner,
        }
    }

    // Freezes the builder into a shared, reference-counted vector.
    pub(crate) fn build(self) -> RcVec<T> {
        RcVec {
            inner: Rc::new(self.inner),
        }
    }
}
|
||||
|
||||
impl<'a, T> RcVecMut<'a, T> {
    pub(crate) fn push(&mut self, element: T) {
        self.inner.push(element);
    }

    pub(crate) fn extend(&mut self, iter: impl IntoIterator<Item = T>) {
        self.inner.extend(iter);
    }

    // Reborrow with a shorter lifetime.
    pub(crate) fn as_mut(&mut self) -> RcVecMut<T> {
        RcVecMut { inner: self.inner }
    }

    // Moves the borrowed contents into a fresh builder, leaving the borrowed
    // vector empty.
    pub(crate) fn take(self) -> RcVecBuilder<T> {
        let vec = mem::take(self.inner);
        RcVecBuilder { inner: vec }
    }
}
|
||||
|
||||
// Cloning shares the storage (bumps the refcount); it never copies elements,
// so `T: Clone` is not required.
impl<T> Clone for RcVec<T> {
    fn clone(&self) -> Self {
        RcVec {
            inner: Rc::clone(&self.inner),
        }
    }
}
|
||||
|
||||
impl<T> IntoIterator for RcVecBuilder<T> {
    type Item = T;
    type IntoIter = RcVecIntoIter<T>;

    fn into_iter(self) -> Self::IntoIter {
        RcVecIntoIter {
            inner: self.inner.into_iter(),
        }
    }
}

impl<T> Iterator for RcVecIntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

// `Rc` is not `RefUnwindSafe` by default; asserting it here is sound for the
// same reason it would be for `&Vec<T>`: no interior mutability is exposed.
impl<T> RefUnwindSafe for RcVec<T> where T: RefUnwindSafe {}
|
||||
|
|
@ -0,0 +1,986 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::detection::inside_proc_macro;
|
||||
use crate::fallback::{self, FromStr2 as _};
|
||||
#[cfg(span_locations)]
|
||||
use crate::location::LineColumn;
|
||||
#[cfg(proc_macro_span)]
|
||||
use crate::probe::proc_macro_span;
|
||||
#[cfg(all(span_locations, proc_macro_span_file))]
|
||||
use crate::probe::proc_macro_span_file;
|
||||
#[cfg(all(span_locations, proc_macro_span_location))]
|
||||
use crate::probe::proc_macro_span_location;
|
||||
use crate::{Delimiter, Punct, Spacing, TokenTree};
|
||||
use core::fmt::{self, Debug, Display};
|
||||
#[cfg(span_locations)]
|
||||
use core::ops::Range;
|
||||
use core::ops::RangeBounds;
|
||||
use std::ffi::CStr;
|
||||
#[cfg(span_locations)]
|
||||
use std::path::PathBuf;
|
||||
|
||||
// Wrapper token stream: delegates to the real `proc_macro` when running
// inside a procedural macro, and to the fallback implementation otherwise.
#[derive(Clone)]
pub(crate) enum TokenStream {
    Compiler(DeferredTokenStream),
    Fallback(fallback::TokenStream),
}

// Work around https://github.com/rust-lang/rust/issues/65080.
// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
// late as possible to batch together consecutive uses of the Extend impl.
#[derive(Clone)]
pub(crate) struct DeferredTokenStream {
    stream: proc_macro::TokenStream,
    // Tokens appended since the last flush into `stream` (see `evaluate_now`).
    extra: Vec<proc_macro::TokenTree>,
}
|
||||
|
||||
// Lex error from either implementation, plus a variant for the case where
// the compiler panicked instead of returning an error.
pub(crate) enum LexError {
    Compiler(proc_macro::LexError),
    Fallback(fallback::LexError),

    // Rustc was supposed to return a LexError, but it panicked instead.
    // https://github.com/rust-lang/rust/issues/58736
    CompilerPanic,
}
|
||||
|
||||
// Panics on an internal invariant violation: a Compiler-variant value met a
// Fallback-variant one (or vice versa). `line` identifies the call site;
// with the `procmacro2_backtrace` cfg a captured backtrace is included.
#[cold]
fn mismatch(line: u32) -> ! {
    #[cfg(procmacro2_backtrace)]
    {
        let backtrace = std::backtrace::Backtrace::force_capture();
        panic!("compiler/fallback mismatch L{}\n\n{}", line, backtrace)
    }
    #[cfg(not(procmacro2_backtrace))]
    {
        panic!("compiler/fallback mismatch L{}", line)
    }
}
|
||||
|
||||
impl DeferredTokenStream {
    fn new(stream: proc_macro::TokenStream) -> Self {
        DeferredTokenStream {
            stream,
            extra: Vec::new(),
        }
    }

    fn is_empty(&self) -> bool {
        self.stream.is_empty() && self.extra.is_empty()
    }

    // Flushes deferred tokens from `extra` into the real stream.
    fn evaluate_now(&mut self) {
        // If-check provides a fast short circuit for the common case of `extra`
        // being empty, which saves a round trip over the proc macro bridge.
        // Improves macro expansion time in winrt by 6% in debug mode.
        if !self.extra.is_empty() {
            self.stream.extend(self.extra.drain(..));
        }
    }

    fn into_token_stream(mut self) -> proc_macro::TokenStream {
        self.evaluate_now();
        self.stream
    }
}
|
||||
|
||||
impl TokenStream {
    // New empty stream of the flavor matching the current context:
    // compiler-backed inside a procedural macro invocation, fallback otherwise.
    pub(crate) fn new() -> Self {
        if inside_proc_macro() {
            TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
        } else {
            TokenStream::Fallback(fallback::TokenStream::new())
        }
    }

    // Parse `src` with whichever backend is active, surfacing lex failures
    // as this crate's `LexError`.
    pub(crate) fn from_str_checked(src: &str) -> Result<Self, LexError> {
        if inside_proc_macro() {
            Ok(TokenStream::Compiler(DeferredTokenStream::new(
                proc_macro::TokenStream::from_str_checked(src)?,
            )))
        } else {
            Ok(TokenStream::Fallback(
                fallback::TokenStream::from_str_checked(src)?,
            ))
        }
    }

    pub(crate) fn is_empty(&self) -> bool {
        match self {
            TokenStream::Compiler(tts) => tts.is_empty(),
            TokenStream::Fallback(tts) => tts.is_empty(),
        }
    }

    // Extract the compiler-backed stream; a fallback value here is an
    // internal invariant violation.
    fn unwrap_nightly(self) -> proc_macro::TokenStream {
        match self {
            TokenStream::Compiler(s) => s.into_token_stream(),
            TokenStream::Fallback(_) => mismatch(line!()),
        }
    }

    // Extract the fallback-backed stream; a compiler value here is an
    // internal invariant violation.
    fn unwrap_stable(self) -> fallback::TokenStream {
        match self {
            TokenStream::Compiler(_) => mismatch(line!()),
            TokenStream::Fallback(s) => s,
        }
    }
}
|
||||
|
||||
// Delegate formatting to the active backend. The compiler arm must clone and
// flush because Display only gets `&self` while the deferred tokens need to
// be merged in.
impl Display for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
            TokenStream::Fallback(tts) => Display::fmt(tts, f),
        }
    }
}
|
||||
|
||||
// A real compiler stream always becomes the `Compiler` variant.
impl From<proc_macro::TokenStream> for TokenStream {
    fn from(inner: proc_macro::TokenStream) -> Self {
        TokenStream::Compiler(DeferredTokenStream::new(inner))
    }
}
|
||||
|
||||
// Convert back to a real compiler stream. A fallback stream has no compiler
// representation, so it is round-tripped through its string form; the tokens
// were already validated when the fallback stream was built, hence the
// `_unchecked` re-parse.
impl From<TokenStream> for proc_macro::TokenStream {
    fn from(inner: TokenStream) -> Self {
        match inner {
            TokenStream::Compiler(inner) => inner.into_token_stream(),
            TokenStream::Fallback(inner) => {
                proc_macro::TokenStream::from_str_unchecked(&inner.to_string())
            }
        }
    }
}
|
||||
|
||||
// A fallback stream always becomes the `Fallback` variant.
impl From<fallback::TokenStream> for TokenStream {
    fn from(inner: fallback::TokenStream) -> Self {
        TokenStream::Fallback(inner)
    }
}
|
||||
|
||||
// Assumes inside_proc_macro().
// Convert one of this crate's token trees into the compiler's token tree.
// Every nested value must be compiler-backed; a fallback value triggers the
// `mismatch` panic via `unwrap_nightly`.
fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
    match token {
        TokenTree::Group(tt) => proc_macro::TokenTree::Group(tt.inner.unwrap_nightly()),
        TokenTree::Punct(tt) => {
            // Punct has no inner compiler handle; rebuild it field by field.
            let spacing = match tt.spacing() {
                Spacing::Joint => proc_macro::Spacing::Joint,
                Spacing::Alone => proc_macro::Spacing::Alone,
            };
            let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
            punct.set_span(tt.span().inner.unwrap_nightly());
            proc_macro::TokenTree::Punct(punct)
        }
        TokenTree::Ident(tt) => proc_macro::TokenTree::Ident(tt.inner.unwrap_nightly()),
        TokenTree::Literal(tt) => proc_macro::TokenTree::Literal(tt.inner.unwrap_nightly()),
    }
}
|
||||
|
||||
// Single-token stream, choosing the backend from the current context.
impl From<TokenTree> for TokenStream {
    fn from(token: TokenTree) -> Self {
        if inside_proc_macro() {
            TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::from(
                into_compiler_token(token),
            )))
        } else {
            TokenStream::Fallback(fallback::TokenStream::from(token))
        }
    }
}
|
||||
|
||||
// Collect token trees into a stream of the context-appropriate flavor.
impl FromIterator<TokenTree> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
        if inside_proc_macro() {
            TokenStream::Compiler(DeferredTokenStream::new(
                trees.into_iter().map(into_compiler_token).collect(),
            ))
        } else {
            TokenStream::Fallback(trees.into_iter().collect())
        }
    }
}
|
||||
|
||||
// Concatenate streams. The first stream decides the flavor; every remaining
// stream must match it, otherwise `mismatch` aborts. An empty iterator yields
// a context-appropriate empty stream.
impl FromIterator<TokenStream> for TokenStream {
    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
        let mut streams = streams.into_iter();
        match streams.next() {
            Some(TokenStream::Compiler(mut first)) => {
                // Flush deferred tokens before extending the compiler stream
                // directly, to preserve token order.
                first.evaluate_now();
                first.stream.extend(streams.map(|s| match s {
                    TokenStream::Compiler(s) => s.into_token_stream(),
                    TokenStream::Fallback(_) => mismatch(line!()),
                }));
                TokenStream::Compiler(first)
            }
            Some(TokenStream::Fallback(mut first)) => {
                first.extend(streams.map(|s| match s {
                    TokenStream::Fallback(s) => s,
                    TokenStream::Compiler(_) => mismatch(line!()),
                }));
                TokenStream::Fallback(first)
            }
            None => TokenStream::new(),
        }
    }
}
|
||||
|
||||
// The hot path for quote-generated code: tokens are buffered in `extra`
// instead of crossing the proc macro bridge on every call.
impl Extend<TokenTree> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
        match self {
            TokenStream::Compiler(tts) => {
                // Here is the reason for DeferredTokenStream.
                for token in stream {
                    tts.extra.push(into_compiler_token(token));
                }
            }
            TokenStream::Fallback(tts) => tts.extend(stream),
        }
    }
}
|
||||
|
||||
// Extend with whole streams. All incoming streams must match `self`'s flavor;
// `unwrap_nightly`/`unwrap_stable` abort on a mix.
impl Extend<TokenStream> for TokenStream {
    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
        match self {
            TokenStream::Compiler(tts) => {
                // Flush buffered single tokens first so ordering is preserved.
                tts.evaluate_now();
                tts.stream
                    .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
            }
            TokenStream::Fallback(tts) => {
                tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
            }
        }
    }
}
|
||||
|
||||
// Same shape as the Display impl: clone-and-flush for the compiler arm,
// straight delegation for the fallback arm.
impl Debug for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
            TokenStream::Fallback(tts) => Debug::fmt(tts, f),
        }
    }
}
|
||||
|
||||
impl LexError {
    // Best-effort span of the error. Only the fallback parser tracks a real
    // error location; the compiler variants report the call site.
    pub(crate) fn span(&self) -> Span {
        match self {
            LexError::Compiler(_) | LexError::CompilerPanic => Span::call_site(),
            LexError::Fallback(e) => Span::Fallback(e.span()),
        }
    }
}
|
||||
|
||||
// Wrap a compiler lex error (used by the `?` operator above).
impl From<proc_macro::LexError> for LexError {
    fn from(e: proc_macro::LexError) -> Self {
        LexError::Compiler(e)
    }
}
|
||||
|
||||
// Wrap a fallback lex error (used by the `?` operator above).
impl From<fallback::LexError> for LexError {
    fn from(e: fallback::LexError) -> Self {
        LexError::Fallback(e)
    }
}
|
||||
|
||||
impl Debug for LexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            LexError::Compiler(e) => Debug::fmt(e, f),
            LexError::Fallback(e) => Debug::fmt(e, f),
            // The panicking compiler produced no error value; synthesize a
            // generic fallback error just to have something to print.
            LexError::CompilerPanic => {
                let fallback = fallback::LexError::call_site();
                Debug::fmt(&fallback, f)
            }
        }
    }
}
|
||||
|
||||
impl Display for LexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            LexError::Compiler(e) => Display::fmt(e, f),
            LexError::Fallback(e) => Display::fmt(e, f),
            // See the Debug impl: print a synthesized fallback error since
            // the compiler panic left us with no error value.
            LexError::CompilerPanic => {
                let fallback = fallback::LexError::call_site();
                Display::fmt(&fallback, f)
            }
        }
    }
}
|
||||
|
||||
// Iterator over a `TokenStream`'s trees, one variant per backend.
#[derive(Clone)]
pub(crate) enum TokenTreeIter {
    Compiler(proc_macro::token_stream::IntoIter),
    Fallback(fallback::TokenTreeIter),
}
|
||||
|
||||
impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenTreeIter;

    // Produce the matching iterator flavor; the compiler arm flushes any
    // deferred tokens first via `into_token_stream`.
    fn into_iter(self) -> TokenTreeIter {
        match self {
            TokenStream::Compiler(tts) => {
                TokenTreeIter::Compiler(tts.into_token_stream().into_iter())
            }
            TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
        }
    }
}
|
||||
|
||||
impl Iterator for TokenTreeIter {
    type Item = TokenTree;

    fn next(&mut self) -> Option<TokenTree> {
        // Fallback items are already this crate's `TokenTree`; only compiler
        // items need the conversion below.
        let token = match self {
            TokenTreeIter::Compiler(iter) => iter.next()?,
            TokenTreeIter::Fallback(iter) => return iter.next(),
        };
        // Inverse of `into_compiler_token`: wrap each compiler token tree in
        // this crate's types.
        Some(match token {
            proc_macro::TokenTree::Group(tt) => {
                TokenTree::Group(crate::Group::_new(Group::Compiler(tt)))
            }
            proc_macro::TokenTree::Punct(tt) => {
                let spacing = match tt.spacing() {
                    proc_macro::Spacing::Joint => Spacing::Joint,
                    proc_macro::Spacing::Alone => Spacing::Alone,
                };
                let mut o = Punct::new(tt.as_char(), spacing);
                o.set_span(crate::Span::_new(Span::Compiler(tt.span())));
                TokenTree::Punct(o)
            }
            proc_macro::TokenTree::Ident(s) => {
                TokenTree::Ident(crate::Ident::_new(Ident::Compiler(s)))
            }
            proc_macro::TokenTree::Literal(l) => {
                TokenTree::Literal(crate::Literal::_new(Literal::Compiler(l)))
            }
        })
    }

    // Forward the underlying iterator's size hint.
    fn size_hint(&self) -> (usize, Option<usize>) {
        match self {
            TokenTreeIter::Compiler(tts) => tts.size_hint(),
            TokenTreeIter::Fallback(tts) => tts.size_hint(),
        }
    }
}
|
||||
|
||||
// Source span, one variant per backend.
#[derive(Copy, Clone)]
pub(crate) enum Span {
    Compiler(proc_macro::Span),
    Fallback(fallback::Span),
}
|
||||
|
||||
impl Span {
    // Span with call-site hygiene, from whichever backend is active.
    pub(crate) fn call_site() -> Self {
        if inside_proc_macro() {
            Span::Compiler(proc_macro::Span::call_site())
        } else {
            Span::Fallback(fallback::Span::call_site())
        }
    }

    // Span with mixed-site (macro_rules-style) hygiene.
    pub(crate) fn mixed_site() -> Self {
        if inside_proc_macro() {
            Span::Compiler(proc_macro::Span::mixed_site())
        } else {
            Span::Fallback(fallback::Span::mixed_site())
        }
    }

    // Definition-site hygiene; nightly-only, hence the cfg gate.
    #[cfg(super_unstable)]
    pub(crate) fn def_site() -> Self {
        if inside_proc_macro() {
            Span::Compiler(proc_macro::Span::def_site())
        } else {
            Span::Fallback(fallback::Span::def_site())
        }
    }

    // Span with `self`'s location and `other`'s hygiene; both spans must be
    // of the same flavor.
    pub(crate) fn resolved_at(&self, other: Span) -> Span {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
            (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
            (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
        }
    }

    // Span with `self`'s hygiene and `other`'s location; both spans must be
    // of the same flavor.
    pub(crate) fn located_at(&self, other: Span) -> Span {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
            (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
            (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
        }
    }

    // Public unwrap: user-facing panic message rather than the internal
    // `mismatch` abort, since callers can legitimately hit this outside of a
    // procedural macro.
    pub(crate) fn unwrap(self) -> proc_macro::Span {
        match self {
            Span::Compiler(s) => s,
            Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"),
        }
    }

    // Byte offsets of the span in its source file; a dummy 0..0 when the
    // compiler's span API is unavailable.
    #[cfg(span_locations)]
    pub(crate) fn byte_range(&self) -> Range<usize> {
        match self {
            #[cfg(proc_macro_span)]
            Span::Compiler(s) => proc_macro_span::byte_range(s),
            #[cfg(not(proc_macro_span))]
            Span::Compiler(_) => 0..0,
            Span::Fallback(s) => s.byte_range(),
        }
    }

    // Start line/column. Columns are converted from the compiler's 1-based
    // numbering to this crate's 0-based numbering, saturating for safety.
    #[cfg(span_locations)]
    pub(crate) fn start(&self) -> LineColumn {
        match self {
            #[cfg(proc_macro_span_location)]
            Span::Compiler(s) => LineColumn {
                line: proc_macro_span_location::line(s),
                column: proc_macro_span_location::column(s).saturating_sub(1),
            },
            #[cfg(not(proc_macro_span_location))]
            Span::Compiler(_) => LineColumn { line: 0, column: 0 },
            Span::Fallback(s) => s.start(),
        }
    }

    // End line/column; same column-numbering conversion as `start`.
    #[cfg(span_locations)]
    pub(crate) fn end(&self) -> LineColumn {
        match self {
            #[cfg(proc_macro_span_location)]
            Span::Compiler(s) => {
                let end = proc_macro_span_location::end(s);
                LineColumn {
                    line: proc_macro_span_location::line(&end),
                    column: proc_macro_span_location::column(&end).saturating_sub(1),
                }
            }
            #[cfg(not(proc_macro_span_location))]
            Span::Compiler(_) => LineColumn { line: 0, column: 0 },
            Span::Fallback(s) => s.end(),
        }
    }

    // Name of the source file, or a placeholder when unavailable.
    #[cfg(span_locations)]
    pub(crate) fn file(&self) -> String {
        match self {
            #[cfg(proc_macro_span_file)]
            Span::Compiler(s) => proc_macro_span_file::file(s),
            #[cfg(not(proc_macro_span_file))]
            Span::Compiler(_) => "<token stream>".to_owned(),
            Span::Fallback(s) => s.file(),
        }
    }

    // Path of the source file if it exists on the local filesystem.
    #[cfg(span_locations)]
    pub(crate) fn local_file(&self) -> Option<PathBuf> {
        match self {
            #[cfg(proc_macro_span_file)]
            Span::Compiler(s) => proc_macro_span_file::local_file(s),
            #[cfg(not(proc_macro_span_file))]
            Span::Compiler(_) => None,
            Span::Fallback(s) => s.local_file(),
        }
    }

    // Smallest span covering both `self` and `other`. Returns None on a
    // flavor mismatch or when the compiler API is unavailable, rather than
    // aborting, because join is advertised as fallible.
    pub(crate) fn join(&self, other: Span) -> Option<Span> {
        let ret = match (self, other) {
            #[cfg(proc_macro_span)]
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(proc_macro_span::join(a, b)?),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
            _ => return None,
        };
        Some(ret)
    }

    // Span identity comparison (nightly-only); mismatched flavors simply
    // compare unequal.
    #[cfg(super_unstable)]
    pub(crate) fn eq(&self, other: &Span) -> bool {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => a.eq(b),
            (Span::Fallback(a), Span::Fallback(b)) => a.eq(b),
            _ => false,
        }
    }

    // Original source text the span points at, if the backend retains it.
    pub(crate) fn source_text(&self) -> Option<String> {
        match self {
            #[cfg(not(no_source_text))]
            Span::Compiler(s) => s.source_text(),
            #[cfg(no_source_text)]
            Span::Compiler(_) => None,
            Span::Fallback(s) => s.source_text(),
        }
    }

    // Internal unwrap: aborts via `mismatch` instead of a user-facing panic.
    fn unwrap_nightly(self) -> proc_macro::Span {
        match self {
            Span::Compiler(s) => s,
            Span::Fallback(_) => mismatch(line!()),
        }
    }
}
|
||||
|
||||
// Wrap a real compiler span into the public `Span` type.
impl From<proc_macro::Span> for crate::Span {
    fn from(proc_span: proc_macro::Span) -> Self {
        crate::Span::_new(Span::Compiler(proc_span))
    }
}
|
||||
|
||||
// A fallback span always becomes the `Fallback` variant.
impl From<fallback::Span> for Span {
    fn from(inner: fallback::Span) -> Self {
        Span::Fallback(inner)
    }
}
|
||||
|
||||
impl Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Span::Compiler(s) => Debug::fmt(s, f),
            Span::Fallback(s) => Debug::fmt(s, f),
        }
    }
}
|
||||
|
||||
// Add a `span` field to a token's Debug output. Compiler spans are always
// shown; the fallback decides for itself whether its span carries any
// information worth printing.
pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
    match span {
        Span::Compiler(s) => {
            debug.field("span", &s);
        }
        Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
    }
}
|
||||
|
||||
// Delimited token group, one variant per backend.
#[derive(Clone)]
pub(crate) enum Group {
    Compiler(proc_macro::Group),
    Fallback(fallback::Group),
}
|
||||
|
||||
impl Group {
    // Build a group around `stream`; the flavor follows the stream's flavor
    // rather than re-checking inside_proc_macro().
    pub(crate) fn new(delimiter: Delimiter, stream: TokenStream) -> Self {
        match stream {
            TokenStream::Compiler(tts) => {
                let delimiter = match delimiter {
                    Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
                    Delimiter::Bracket => proc_macro::Delimiter::Bracket,
                    Delimiter::Brace => proc_macro::Delimiter::Brace,
                    Delimiter::None => proc_macro::Delimiter::None,
                };
                Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream()))
            }
            TokenStream::Fallback(stream) => {
                Group::Fallback(fallback::Group::new(delimiter, stream))
            }
        }
    }

    // Which delimiter surrounds this group, in this crate's enum.
    pub(crate) fn delimiter(&self) -> Delimiter {
        match self {
            Group::Compiler(g) => match g.delimiter() {
                proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
                proc_macro::Delimiter::Bracket => Delimiter::Bracket,
                proc_macro::Delimiter::Brace => Delimiter::Brace,
                proc_macro::Delimiter::None => Delimiter::None,
            },
            Group::Fallback(g) => g.delimiter(),
        }
    }

    // The tokens inside the delimiters, wrapped in the matching flavor.
    pub(crate) fn stream(&self) -> TokenStream {
        match self {
            Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())),
            Group::Fallback(g) => TokenStream::Fallback(g.stream()),
        }
    }

    // Span covering the entire group including both delimiters.
    pub(crate) fn span(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span()),
            Group::Fallback(g) => Span::Fallback(g.span()),
        }
    }

    // Span of the opening delimiter only.
    pub(crate) fn span_open(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span_open()),
            Group::Fallback(g) => Span::Fallback(g.span_open()),
        }
    }

    // Span of the closing delimiter only.
    pub(crate) fn span_close(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span_close()),
            Group::Fallback(g) => Span::Fallback(g.span_close()),
        }
    }

    // Set the group's span; the span must be of the same flavor as the group.
    pub(crate) fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s),
            (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s),
            (Group::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
            (Group::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
        }
    }

    // Extract the compiler group; aborts on a fallback value.
    fn unwrap_nightly(self) -> proc_macro::Group {
        match self {
            Group::Compiler(g) => g,
            Group::Fallback(_) => mismatch(line!()),
        }
    }
}
|
||||
|
||||
// A fallback group always becomes the `Fallback` variant.
impl From<fallback::Group> for Group {
    fn from(g: fallback::Group) -> Self {
        Group::Fallback(g)
    }
}
|
||||
|
||||
impl Display for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Group::Compiler(group) => Display::fmt(group, formatter),
            Group::Fallback(group) => Display::fmt(group, formatter),
        }
    }
}
|
||||
|
||||
impl Debug for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Group::Compiler(group) => Debug::fmt(group, formatter),
            Group::Fallback(group) => Debug::fmt(group, formatter),
        }
    }
}
|
||||
|
||||
// Identifier token, one variant per backend.
#[derive(Clone)]
pub(crate) enum Ident {
    Compiler(proc_macro::Ident),
    Fallback(fallback::Ident),
}
|
||||
|
||||
impl Ident {
    // Build an identifier, validating the string; the flavor follows the
    // span's flavor. On the compiler side the validation (and its panic on an
    // invalid identifier) happens inside proc_macro::Ident::new itself.
    #[track_caller]
    pub(crate) fn new_checked(string: &str, span: Span) -> Self {
        match span {
            Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_checked(string, s)),
        }
    }

    // Same as `new_checked` but produces a raw identifier (`r#...`).
    #[track_caller]
    pub(crate) fn new_raw_checked(string: &str, span: Span) -> Self {
        match span {
            Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)),
            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw_checked(string, s)),
        }
    }

    pub(crate) fn span(&self) -> Span {
        match self {
            Ident::Compiler(t) => Span::Compiler(t.span()),
            Ident::Fallback(t) => Span::Fallback(t.span()),
        }
    }

    // Set the identifier's span; the span must be of the same flavor.
    pub(crate) fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s),
            (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s),
            (Ident::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
            (Ident::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
        }
    }

    // Extract the compiler identifier; aborts on a fallback value.
    fn unwrap_nightly(self) -> proc_macro::Ident {
        match self {
            Ident::Compiler(s) => s,
            Ident::Fallback(_) => mismatch(line!()),
        }
    }
}
|
||||
|
||||
// A fallback identifier always becomes the `Fallback` variant.
impl From<fallback::Ident> for Ident {
    fn from(inner: fallback::Ident) -> Self {
        Ident::Fallback(inner)
    }
}
|
||||
|
||||
// Identifiers compare by name. Compiler idents have no direct comparison
// API, so their string representations are compared; mixing flavors is an
// internal invariant violation, not an inequality.
impl PartialEq for Ident {
    fn eq(&self, other: &Ident) -> bool {
        match (self, other) {
            (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(),
            (Ident::Fallback(t), Ident::Fallback(o)) => t == o,
            (Ident::Compiler(_), Ident::Fallback(_)) => mismatch(line!()),
            (Ident::Fallback(_), Ident::Compiler(_)) => mismatch(line!()),
        }
    }
}
|
||||
|
||||
// Compare an identifier against any string-like value by name.
impl<T> PartialEq<T> for Ident
where
    T: ?Sized + AsRef<str>,
{
    fn eq(&self, other: &T) -> bool {
        let other = other.as_ref();
        match self {
            Ident::Compiler(t) => t.to_string() == other,
            Ident::Fallback(t) => t == other,
        }
    }
}
|
||||
|
||||
impl Display for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Ident::Compiler(t) => Display::fmt(t, f),
            Ident::Fallback(t) => Display::fmt(t, f),
        }
    }
}
|
||||
|
||||
impl Debug for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Ident::Compiler(t) => Debug::fmt(t, f),
            Ident::Fallback(t) => Debug::fmt(t, f),
        }
    }
}
|
||||
|
||||
// Literal token, one variant per backend.
#[derive(Clone)]
pub(crate) enum Literal {
    Compiler(proc_macro::Literal),
    Fallback(fallback::Literal),
}
|
||||
|
||||
// Generate a constructor per numeric type that produces a literal carrying a
// type suffix (e.g. `10u8`), dispatching on the current context.
macro_rules! suffixed_numbers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub(crate) fn $name(n: $kind) -> Literal {
            if inside_proc_macro() {
                Literal::Compiler(proc_macro::Literal::$name(n))
            } else {
                Literal::Fallback(fallback::Literal::$name(n))
            }
        }
    )*)
}
|
||||
|
||||
// Same as `suffixed_numbers!` but for literals without a type suffix
// (e.g. `10`).
macro_rules! unsuffixed_integers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub(crate) fn $name(n: $kind) -> Literal {
            if inside_proc_macro() {
                Literal::Compiler(proc_macro::Literal::$name(n))
            } else {
                Literal::Fallback(fallback::Literal::$name(n))
            }
        }
    )*)
}
|
||||
|
||||
impl Literal {
    // Parse a literal from its source representation, validating it.
    pub(crate) fn from_str_checked(repr: &str) -> Result<Self, LexError> {
        if inside_proc_macro() {
            let literal = proc_macro::Literal::from_str_checked(repr)?;
            Ok(Literal::Compiler(literal))
        } else {
            let literal = fallback::Literal::from_str_checked(repr)?;
            Ok(Literal::Fallback(literal))
        }
    }

    // Parse a literal the caller guarantees to be valid — unsafe because the
    // fallback path skips validation entirely.
    pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::from_str_unchecked(repr))
        } else {
            Literal::Fallback(unsafe { fallback::Literal::from_str_unchecked(repr) })
        }
    }

    // Constructors for suffixed numeric literals, e.g. `1u8`, `1.0f32`.
    suffixed_numbers! {
        u8_suffixed => u8,
        u16_suffixed => u16,
        u32_suffixed => u32,
        u64_suffixed => u64,
        u128_suffixed => u128,
        usize_suffixed => usize,
        i8_suffixed => i8,
        i16_suffixed => i16,
        i32_suffixed => i32,
        i64_suffixed => i64,
        i128_suffixed => i128,
        isize_suffixed => isize,

        f32_suffixed => f32,
        f64_suffixed => f64,
    }

    // Constructors for unsuffixed integer literals, e.g. `1`.
    unsuffixed_integers! {
        u8_unsuffixed => u8,
        u16_unsuffixed => u16,
        u32_unsuffixed => u32,
        u64_unsuffixed => u64,
        u128_unsuffixed => u128,
        usize_unsuffixed => usize,
        i8_unsuffixed => i8,
        i16_unsuffixed => i16,
        i32_unsuffixed => i32,
        i64_unsuffixed => i64,
        i128_unsuffixed => i128,
        isize_unsuffixed => isize,
    }

    // The float constructors are written out by hand (not via the macro)
    // because the backend methods share the same names but floats need no
    // unsuffixed-integer treatment.
    pub(crate) fn f32_unsuffixed(f: f32) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
        } else {
            Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
        }
    }

    pub(crate) fn f64_unsuffixed(f: f64) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
        } else {
            Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
        }
    }

    // String literal with any needed escaping applied by the backend.
    pub(crate) fn string(string: &str) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::string(string))
        } else {
            Literal::Fallback(fallback::Literal::string(string))
        }
    }

    // Character literal, e.g. `'a'`.
    pub(crate) fn character(ch: char) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::character(ch))
        } else {
            Literal::Fallback(fallback::Literal::character(ch))
        }
    }

    // Byte character literal, e.g. `b'a'`. Older compilers lack the method,
    // so the cfg'd path renders the literal with the fallback and re-parses
    // the known-valid repr on the compiler side.
    pub(crate) fn byte_character(byte: u8) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler({
                #[cfg(not(no_literal_byte_character))]
                {
                    proc_macro::Literal::byte_character(byte)
                }

                #[cfg(no_literal_byte_character)]
                {
                    let fallback = fallback::Literal::byte_character(byte);
                    proc_macro::Literal::from_str_unchecked(&fallback.repr)
                }
            })
        } else {
            Literal::Fallback(fallback::Literal::byte_character(byte))
        }
    }

    // Byte string literal, e.g. `b"abc"`.
    pub(crate) fn byte_string(bytes: &[u8]) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler(proc_macro::Literal::byte_string(bytes))
        } else {
            Literal::Fallback(fallback::Literal::byte_string(bytes))
        }
    }

    // C string literal, e.g. `c"abc"`; same old-compiler workaround as
    // `byte_character`.
    pub(crate) fn c_string(string: &CStr) -> Literal {
        if inside_proc_macro() {
            Literal::Compiler({
                #[cfg(not(no_literal_c_string))]
                {
                    proc_macro::Literal::c_string(string)
                }

                #[cfg(no_literal_c_string)]
                {
                    let fallback = fallback::Literal::c_string(string);
                    proc_macro::Literal::from_str_unchecked(&fallback.repr)
                }
            })
        } else {
            Literal::Fallback(fallback::Literal::c_string(string))
        }
    }

    pub(crate) fn span(&self) -> Span {
        match self {
            Literal::Compiler(lit) => Span::Compiler(lit.span()),
            Literal::Fallback(lit) => Span::Fallback(lit.span()),
        }
    }

    // Set the literal's span; the span must be of the same flavor.
    pub(crate) fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s),
            (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s),
            (Literal::Compiler(_), Span::Fallback(_)) => mismatch(line!()),
            (Literal::Fallback(_), Span::Compiler(_)) => mismatch(line!()),
        }
    }

    // Span for a sub-range of the literal's text; None when the compiler's
    // span API is unavailable or the range is out of bounds.
    pub(crate) fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
        match self {
            #[cfg(proc_macro_span)]
            Literal::Compiler(lit) => proc_macro_span::subspan(lit, range).map(Span::Compiler),
            #[cfg(not(proc_macro_span))]
            Literal::Compiler(_lit) => None,
            Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback),
        }
    }

    // Extract the compiler literal; aborts on a fallback value.
    fn unwrap_nightly(self) -> proc_macro::Literal {
        match self {
            Literal::Compiler(s) => s,
            Literal::Fallback(_) => mismatch(line!()),
        }
    }
}
|
||||
|
||||
// A fallback literal always becomes the `Fallback` variant.
impl From<fallback::Literal> for Literal {
    fn from(s: fallback::Literal) -> Self {
        Literal::Fallback(s)
    }
}
|
||||
|
||||
impl Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => Display::fmt(t, f),
            Literal::Fallback(t) => Display::fmt(t, f),
        }
    }
}
|
||||
|
||||
impl Debug for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => Debug::fmt(t, f),
            Literal::Fallback(t) => Debug::fmt(t, f),
        }
    }
}
|
||||
|
||||
// Reset the fallback span interner for this thread. Only meaningful for the
// fallback backend — compiler spans are owned by rustc — so calling it from
// inside a procedural macro is a usage error and panics.
#[cfg(span_locations)]
pub(crate) fn invalidate_current_thread_spans() {
    if inside_proc_macro() {
        panic!(
            "proc_macro2::extra::invalidate_current_thread_spans is not available in procedural macros"
        );
    } else {
        crate::fallback::invalidate_current_thread_spans();
    }
}
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
# `quote`
|
||||
|
||||
These source files come from the Rust `quote` crate, version 1.0.40
|
||||
(released 2025-03-12), hosted in the <https://github.com/dtolnay/quote>
|
||||
repository, licensed under "Apache-2.0 OR MIT" and only modified to add
|
||||
the SPDX license identifiers.
|
||||
|
||||
For copyright details, please see:
|
||||
|
||||
https://github.com/dtolnay/quote/blob/1.0.40/README.md#license
|
||||
https://github.com/dtolnay/quote/blob/1.0.40/LICENSE-APACHE
|
||||
https://github.com/dtolnay/quote/blob/1.0.40/LICENSE-MIT
|
||||
|
|
@ -0,0 +1,112 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use super::ToTokens;
|
||||
use core::iter;
|
||||
use proc_macro2::{TokenStream, TokenTree};
|
||||
|
||||
/// TokenStream extension trait with methods for appending tokens.
///
/// This trait is sealed and cannot be implemented outside of the `quote` crate.
pub trait TokenStreamExt: private::Sealed {
    /// For use by `ToTokens` implementations.
    ///
    /// Appends the token specified to this list of tokens.
    fn append<U>(&mut self, token: U)
    where
        U: Into<TokenTree>;

    /// For use by `ToTokens` implementations.
    ///
    /// Appends each item of the iterator in order, with no separators.
    ///
    /// ```
    /// # use quote::{quote, TokenStreamExt, ToTokens};
    /// # use proc_macro2::TokenStream;
    /// #
    /// struct X;
    ///
    /// impl ToTokens for X {
    ///     fn to_tokens(&self, tokens: &mut TokenStream) {
    ///         tokens.append_all(&[true, false]);
    ///     }
    /// }
    ///
    /// let tokens = quote!(#X);
    /// assert_eq!(tokens.to_string(), "true false");
    /// ```
    fn append_all<I>(&mut self, iter: I)
    where
        I: IntoIterator,
        I::Item: ToTokens;

    /// For use by `ToTokens` implementations.
    ///
    /// Appends all of the items in the iterator `I`, separated by the tokens
    /// `U`. No separator appears before the first or after the last item.
    fn append_separated<I, U>(&mut self, iter: I, op: U)
    where
        I: IntoIterator,
        I::Item: ToTokens,
        U: ToTokens;

    /// For use by `ToTokens` implementations.
    ///
    /// Appends all tokens in the iterator `I`, appending `U` after each
    /// element, including after the last element of the iterator.
    fn append_terminated<I, U>(&mut self, iter: I, term: U)
    where
        I: IntoIterator,
        I::Item: ToTokens,
        U: ToTokens;
}
|
||||
|
||||
impl TokenStreamExt for TokenStream {
|
||||
fn append<U>(&mut self, token: U)
|
||||
where
|
||||
U: Into<TokenTree>,
|
||||
{
|
||||
self.extend(iter::once(token.into()));
|
||||
}
|
||||
|
||||
fn append_all<I>(&mut self, iter: I)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
{
|
||||
for token in iter {
|
||||
token.to_tokens(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn append_separated<I, U>(&mut self, iter: I, op: U)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
U: ToTokens,
|
||||
{
|
||||
for (i, token) in iter.into_iter().enumerate() {
|
||||
if i > 0 {
|
||||
op.to_tokens(self);
|
||||
}
|
||||
token.to_tokens(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn append_terminated<I, U>(&mut self, iter: I, term: U)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
U: ToTokens,
|
||||
{
|
||||
for token in iter {
|
||||
token.to_tokens(self);
|
||||
term.to_tokens(self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sealing module: only types with a `Sealed` impl here can implement
// `TokenStreamExt`, preventing downstream implementations.
mod private {
    use proc_macro2::TokenStream;

    pub trait Sealed {}

    impl Sealed for TokenStream {}
}
|
||||
|
|
@ -0,0 +1,170 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
/// Formatting macro for constructing `Ident`s.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Syntax
|
||||
///
|
||||
/// Syntax is copied from the [`format!`] macro, supporting both positional and
|
||||
/// named arguments.
|
||||
///
|
||||
/// Only a limited set of formatting traits are supported. The current mapping
|
||||
/// of format types to traits is:
|
||||
///
|
||||
/// * `{}` ⇒ [`IdentFragment`]
|
||||
/// * `{:o}` ⇒ [`Octal`](std::fmt::Octal)
|
||||
/// * `{:x}` ⇒ [`LowerHex`](std::fmt::LowerHex)
|
||||
/// * `{:X}` ⇒ [`UpperHex`](std::fmt::UpperHex)
|
||||
/// * `{:b}` ⇒ [`Binary`](std::fmt::Binary)
|
||||
///
|
||||
/// See [`std::fmt`] for more information.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # IdentFragment
|
||||
///
|
||||
/// Unlike `format!`, this macro uses the [`IdentFragment`] formatting trait by
|
||||
/// default. This trait is like `Display`, with a few differences:
|
||||
///
|
||||
/// * `IdentFragment` is only implemented for a limited set of types, such as
|
||||
/// unsigned integers and strings.
|
||||
/// * [`Ident`] arguments will have their `r#` prefixes stripped, if present.
|
||||
///
|
||||
/// [`IdentFragment`]: crate::IdentFragment
|
||||
/// [`Ident`]: proc_macro2::Ident
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Hygiene
|
||||
///
|
||||
/// The [`Span`] of the first `Ident` argument is used as the span of the final
|
||||
/// identifier, falling back to [`Span::call_site`] when no identifiers are
|
||||
/// provided.
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// # let ident = format_ident!("Ident");
|
||||
/// // If `ident` is an Ident, the span of `my_ident` will be inherited from it.
|
||||
/// let my_ident = format_ident!("My{}{}", ident, "IsCool");
|
||||
/// assert_eq!(my_ident, "MyIdentIsCool");
|
||||
/// ```
|
||||
///
|
||||
/// Alternatively, the span can be overridden by passing the `span` named
|
||||
/// argument.
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// let my_span = /* ... */;
|
||||
/// # };
|
||||
/// # let my_span = proc_macro2::Span::call_site();
|
||||
/// format_ident!("MyIdent", span = my_span);
|
||||
/// ```
|
||||
///
|
||||
/// [`Span`]: proc_macro2::Span
|
||||
/// [`Span::call_site`]: proc_macro2::Span::call_site
|
||||
///
|
||||
/// <p><br></p>
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This method will panic if the resulting formatted string is not a valid
|
||||
/// identifier.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Composing raw and non-raw identifiers:
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// let my_ident = format_ident!("My{}", "Ident");
|
||||
/// assert_eq!(my_ident, "MyIdent");
|
||||
///
|
||||
/// let raw = format_ident!("r#Raw");
|
||||
/// assert_eq!(raw, "r#Raw");
|
||||
///
|
||||
/// let my_ident_raw = format_ident!("{}Is{}", my_ident, raw);
|
||||
/// assert_eq!(my_ident_raw, "MyIdentIsRaw");
|
||||
/// ```
|
||||
///
|
||||
/// Integer formatting options:
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// let num: u32 = 10;
|
||||
///
|
||||
/// let decimal = format_ident!("Id_{}", num);
|
||||
/// assert_eq!(decimal, "Id_10");
|
||||
///
|
||||
/// let octal = format_ident!("Id_{:o}", num);
|
||||
/// assert_eq!(octal, "Id_12");
|
||||
///
|
||||
/// let binary = format_ident!("Id_{:b}", num);
|
||||
/// assert_eq!(binary, "Id_1010");
|
||||
///
|
||||
/// let lower_hex = format_ident!("Id_{:x}", num);
|
||||
/// assert_eq!(lower_hex, "Id_a");
|
||||
///
|
||||
/// let upper_hex = format_ident!("Id_{:X}", num);
|
||||
/// assert_eq!(upper_hex, "Id_A");
|
||||
/// ```
|
||||
#[macro_export]
macro_rules! format_ident {
    // Format string with no further arguments: seed the accumulator with no
    // explicit span (`None`) and hand off to the internal impl macro, which
    // falls back to `Span::call_site()` in `mk_ident`.
    ($fmt:expr) => {
        $crate::format_ident_impl!([
            $crate::__private::Option::None,
            $fmt
        ])
    };

    // Format string plus additional arguments (positional, named, and/or
    // `span = ...`), consumed one at a time by `format_ident_impl!`.
    ($fmt:expr, $($rest:tt)*) => {
        $crate::format_ident_impl!([
            $crate::__private::Option::None,
            $fmt
        ] $($rest)*)
    };
}
|
||||
|
||||
// Internal "push-down accumulation" macro behind `format_ident!`: the bracketed
// head `[$span, $($fmt)*]` accumulates an `Option<Span>` plus a growing
// `format!` argument list while the remaining input is consumed arm by arm.
#[macro_export]
#[doc(hidden)]
macro_rules! format_ident_impl {
    // Final state: all input consumed. Render the accumulated `format!`
    // arguments and build the identifier with the accumulated span.
    ([$span:expr, $($fmt:tt)*]) => {
        $crate::__private::mk_ident(
            &$crate::__private::format!($($fmt)*),
            $span,
        )
    };

    // Span argument: a trailing `span = ...` without a comma is normalized to
    // the comma form handled by the next arm.
    ([$old:expr, $($fmt:tt)*] span = $span:expr) => {
        $crate::format_ident_impl!([$old, $($fmt)*] span = $span,)
    };
    // An explicit `span = ...` unconditionally replaces whatever span was
    // accumulated so far.
    ([$old:expr, $($fmt:tt)*] span = $span:expr, $($rest:tt)*) => {
        $crate::format_ident_impl!([
            $crate::__private::Option::Some::<$crate::__private::Span>($span),
            $($fmt)*
        ] $($rest)*)
    };

    // Named argument: normalize the comma-less tail form first.
    ([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr) => {
        $crate::format_ident_impl!([$span, $($fmt)*] $name = $arg,)
    };
    // Wrap the argument in `IdentFragmentAdapter` (gives it `Display` and a
    // `span()`); `$span.or(arg.span())` keeps the first span seen.
    ([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr, $($rest:tt)*) => {
        match $crate::__private::IdentFragmentAdapter(&$arg) {
            arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, $name = arg] $($rest)*),
        }
    };

    // Positional argument: same treatment as named arguments, without a name.
    ([$span:expr, $($fmt:tt)*] $arg:expr) => {
        $crate::format_ident_impl!([$span, $($fmt)*] $arg,)
    };
    ([$span:expr, $($fmt:tt)*] $arg:expr, $($rest:tt)*) => {
        match $crate::__private::IdentFragmentAdapter(&$arg) {
            arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, arg] $($rest)*),
        }
    };
}
|
||||
|
|
@ -0,0 +1,90 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use alloc::borrow::Cow;
|
||||
use core::fmt;
|
||||
use proc_macro2::{Ident, Span};
|
||||
|
||||
/// Specialized formatting trait used by `format_ident!`.
|
||||
///
|
||||
/// [`Ident`] arguments formatted using this trait will have their `r#` prefix
|
||||
/// stripped, if present.
|
||||
///
|
||||
/// See [`format_ident!`] for more information.
|
||||
///
|
||||
/// [`format_ident!`]: crate::format_ident
|
||||
pub trait IdentFragment {
    /// Format this value as an identifier fragment.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result;

    /// Span associated with this `IdentFragment`.
    ///
    /// If non-`None`, may be inherited by formatted identifiers.
    // Default: most fragments (strings, integers, ...) carry no span; only
    // the `Ident` impl below overrides this.
    fn span(&self) -> Option<Span> {
        None
    }
}
|
||||
|
||||
impl<T: IdentFragment + ?Sized> IdentFragment for &T {
|
||||
fn span(&self) -> Option<Span> {
|
||||
<T as IdentFragment>::span(*self)
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
IdentFragment::fmt(*self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + ?Sized> IdentFragment for &mut T {
|
||||
fn span(&self) -> Option<Span> {
|
||||
<T as IdentFragment>::span(*self)
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
IdentFragment::fmt(*self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl IdentFragment for Ident {
|
||||
fn span(&self) -> Option<Span> {
|
||||
Some(self.span())
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let id = self.to_string();
|
||||
if let Some(id) = id.strip_prefix("r#") {
|
||||
fmt::Display::fmt(id, f)
|
||||
} else {
|
||||
fmt::Display::fmt(&id[..], f)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> IdentFragment for Cow<'_, T>
|
||||
where
|
||||
T: IdentFragment + ToOwned + ?Sized,
|
||||
{
|
||||
fn span(&self) -> Option<Span> {
|
||||
T::span(self)
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
T::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
// Limited set of types which this is implemented for, as we want to avoid types
|
||||
// which will often include non-identifier characters in their `Display` impl.
|
||||
macro_rules! ident_fragment_display {
|
||||
($($T:ty),*) => {
|
||||
$(
|
||||
impl IdentFragment for $T {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
ident_fragment_display!(bool, str, String, char);
|
||||
ident_fragment_display!(u8, u16, u32, u64, u128, usize);
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,494 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use self::get_span::{GetSpan, GetSpanBase, GetSpanInner};
|
||||
use crate::{IdentFragment, ToTokens, TokenStreamExt};
|
||||
use core::fmt;
|
||||
use core::iter;
|
||||
use core::ops::BitOr;
|
||||
use proc_macro2::{Group, Ident, Punct, Spacing, TokenTree};
|
||||
|
||||
#[doc(hidden)]
|
||||
pub use alloc::format;
|
||||
#[doc(hidden)]
|
||||
pub use core::option::Option;
|
||||
|
||||
// Re-exported aliases so `quote!`-generated code can name these types through
// `$crate::__private` without depending on the caller's imports.
#[doc(hidden)]
pub type Delimiter = proc_macro2::Delimiter;
#[doc(hidden)]
pub type Span = proc_macro2::Span;
#[doc(hidden)]
pub type TokenStream = proc_macro2::TokenStream;

// Zero-sized type-level booleans used by `quote!` repetitions to verify at
// compile time that a `#(...)*` repetition binds at least one real iterator.
#[doc(hidden)]
pub struct HasIterator; // True
#[doc(hidden)]
pub struct ThereIsNoIteratorInRepetition; // False
|
||||
|
||||
// `|` on the marker types is a type-level logical OR: combining the markers of
// all interpolations in a repetition yields `HasIterator` iff at least one of
// them supplied a real iterator.
impl BitOr<ThereIsNoIteratorInRepetition> for ThereIsNoIteratorInRepetition {
    type Output = ThereIsNoIteratorInRepetition;
    // false | false = false
    fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> ThereIsNoIteratorInRepetition {
        ThereIsNoIteratorInRepetition
    }
}

impl BitOr<ThereIsNoIteratorInRepetition> for HasIterator {
    type Output = HasIterator;
    // true | false = true
    fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> HasIterator {
        HasIterator
    }
}

impl BitOr<HasIterator> for ThereIsNoIteratorInRepetition {
    type Output = HasIterator;
    // false | true = true
    fn bitor(self, _rhs: HasIterator) -> HasIterator {
        HasIterator
    }
}

impl BitOr<HasIterator> for HasIterator {
    type Output = HasIterator;
    // true | true = true
    fn bitor(self, _rhs: HasIterator) -> HasIterator {
        HasIterator
    }
}
|
||||
|
||||
/// Extension traits used by the implementation of `quote!`. These are defined
|
||||
/// in separate traits, rather than as a single trait due to ambiguity issues.
|
||||
///
|
||||
/// These traits expose a `quote_into_iter` method which should allow calling
|
||||
/// whichever impl happens to be applicable. Calling that method repeatedly on
|
||||
/// the returned value should be idempotent.
|
||||
#[doc(hidden)]
pub mod ext {
    use super::RepInterp;
    use super::{HasIterator as HasIter, ThereIsNoIteratorInRepetition as DoesNotHaveIter};
    use crate::ToTokens;
    use alloc::collections::btree_set::{self, BTreeSet};
    use core::slice;

    /// Extension trait providing the `quote_into_iter` method on iterators.
    #[doc(hidden)]
    pub trait RepIteratorExt: Iterator + Sized {
        // Consumes the iterator, tagging it with `HasIter` to signal that the
        // repetition has a real driving iterator.
        fn quote_into_iter(self) -> (Self, HasIter) {
            (self, HasIter)
        }
    }

    impl<T: Iterator> RepIteratorExt for T {}

    /// Extension trait providing the `quote_into_iter` method for
    /// non-iterable types. These types interpolate the same value in each
    /// iteration of the repetition.
    #[doc(hidden)]
    pub trait RepToTokensExt {
        /// Pretend to be an iterator for the purposes of `quote_into_iter`.
        /// This allows repeated calls to `quote_into_iter` to continue
        /// correctly returning DoesNotHaveIter.
        fn next(&self) -> Option<&Self> {
            Some(self)
        }

        fn quote_into_iter(&self) -> (&Self, DoesNotHaveIter) {
            (self, DoesNotHaveIter)
        }
    }

    impl<T: ToTokens + ?Sized> RepToTokensExt for T {}

    /// Extension trait providing the `quote_into_iter` method for types that
    /// can be referenced as an iterator.
    #[doc(hidden)]
    pub trait RepAsIteratorExt<'q> {
        type Iter: Iterator;

        fn quote_into_iter(&'q self) -> (Self::Iter, HasIter);
    }

    // Shared and exclusive references delegate to the referent.
    impl<'q, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &T {
        type Iter = T::Iter;

        fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
            <T as RepAsIteratorExt>::quote_into_iter(*self)
        }
    }

    impl<'q, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &mut T {
        type Iter = T::Iter;

        fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
            <T as RepAsIteratorExt>::quote_into_iter(*self)
        }
    }

    // Common containers: slices, fixed-size arrays, vectors, and sets all
    // iterate by shared reference.
    impl<'q, T: 'q> RepAsIteratorExt<'q> for [T] {
        type Iter = slice::Iter<'q, T>;

        fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
            (self.iter(), HasIter)
        }
    }

    impl<'q, T: 'q, const N: usize> RepAsIteratorExt<'q> for [T; N] {
        type Iter = slice::Iter<'q, T>;

        fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
            (self.iter(), HasIter)
        }
    }

    impl<'q, T: 'q> RepAsIteratorExt<'q> for Vec<T> {
        type Iter = slice::Iter<'q, T>;

        fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
            (self.iter(), HasIter)
        }
    }

    impl<'q, T: 'q> RepAsIteratorExt<'q> for BTreeSet<T> {
        type Iter = btree_set::Iter<'q, T>;

        fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
            (self.iter(), HasIter)
        }
    }

    // A re-bound repetition variable iterates like the value it wraps.
    impl<'q, T: RepAsIteratorExt<'q>> RepAsIteratorExt<'q> for RepInterp<T> {
        type Iter = T::Iter;

        fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
            self.0.quote_into_iter()
        }
    }
}
|
||||
|
||||
// Helper type used within interpolations to allow for repeated binding names.
// Implements the relevant traits, and exports a dummy `next()` method.
#[derive(Copy, Clone)]
#[doc(hidden)]
pub struct RepInterp<T>(pub T);

impl<T> RepInterp<T> {
    // This method is intended to look like `Iterator::next`, and is called when
    // a name is bound multiple times, as the previous binding will shadow the
    // original `Iterator` object. This allows us to avoid advancing the
    // iterator multiple times per iteration.
    pub fn next(self) -> Option<T> {
        Some(self.0)
    }
}

impl<T: Iterator> Iterator for RepInterp<T> {
    type Item = T::Item;

    // Real iterator case: advance the wrapped iterator.
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next()
    }
}

// Interpolating the wrapper emits the wrapped value's tokens unchanged.
impl<T: ToTokens> ToTokens for RepInterp<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.0.to_tokens(tokens);
    }
}
|
||||
|
||||
#[doc(hidden)]
#[inline]
// Entry point of the deref-based specialization in `mod get_span`: wraps the
// value so that `.__into_span()` resolves to a different impl depending on
// whether `T` is `Span`, `DelimSpan`, or anything else.
pub fn get_span<T>(span: T) -> GetSpan<T> {
    GetSpan(GetSpanInner(GetSpanBase(span)))
}
|
||||
|
||||
mod get_span {
    use core::ops::Deref;
    use proc_macro2::extra::DelimSpan;
    use proc_macro2::Span;

    // Three wrapper layers implement autoref/deref-based specialization:
    // method resolution on `GetSpan<T>` first tries the inherent impl for
    // `GetSpan<Span>`, then derefs to `GetSpanInner<T>` (which handles
    // `DelimSpan`), and finally derefs to `GetSpanBase<T>` as the fallback.
    pub struct GetSpan<T>(pub(crate) GetSpanInner<T>);

    pub struct GetSpanInner<T>(pub(crate) GetSpanBase<T>);

    pub struct GetSpanBase<T>(pub(crate) T);

    // Most specific: the value already is a `Span`.
    impl GetSpan<Span> {
        #[inline]
        pub fn __into_span(self) -> Span {
            ((self.0).0).0
        }
    }

    // A `DelimSpan` is collapsed into one `Span` covering the whole group.
    impl GetSpanInner<DelimSpan> {
        #[inline]
        pub fn __into_span(&self) -> Span {
            (self.0).0.join()
        }
    }

    // Fallback for every other type; it exists only so the method call always
    // type-checks, and is never expected to execute.
    impl<T> GetSpanBase<T> {
        #[allow(clippy::unused_self)]
        pub fn __into_span(&self) -> T {
            unreachable!()
        }
    }

    impl<T> Deref for GetSpan<T> {
        type Target = GetSpanInner<T>;

        #[inline]
        fn deref(&self) -> &Self::Target {
            &self.0
        }
    }

    impl<T> Deref for GetSpanInner<T> {
        type Target = GetSpanBase<T>;

        #[inline]
        fn deref(&self) -> &Self::Target {
            &self.0
        }
    }
}
|
||||
|
||||
// Appends a delimited group (e.g. `{ ... }`, `( ... )`) containing `inner`.
#[doc(hidden)]
pub fn push_group(tokens: &mut TokenStream, delimiter: Delimiter, inner: TokenStream) {
    tokens.append(Group::new(delimiter, inner));
}

// Same as `push_group`, but with the group's span set explicitly.
#[doc(hidden)]
pub fn push_group_spanned(
    tokens: &mut TokenStream,
    span: Span,
    delimiter: Delimiter,
    inner: TokenStream,
) {
    let mut g = Group::new(delimiter, inner);
    g.set_span(span);
    tokens.append(g);
}

// Parses `s` as Rust tokens and appends them. Panics if `s` is not a valid
// token stream; `quote!` only generates calls with well-formed input.
#[doc(hidden)]
pub fn parse(tokens: &mut TokenStream, s: &str) {
    let s: TokenStream = s.parse().expect("invalid token stream");
    tokens.extend(iter::once(s));
}

// Like `parse`, but recursively re-spans every parsed token to `span`.
#[doc(hidden)]
pub fn parse_spanned(tokens: &mut TokenStream, span: Span, s: &str) {
    let s: TokenStream = s.parse().expect("invalid token stream");
    tokens.extend(s.into_iter().map(|t| respan_token_tree(t, span)));
}
|
||||
|
||||
// Token tree with every span replaced by the given one.
|
||||
fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
|
||||
match &mut token {
|
||||
TokenTree::Group(g) => {
|
||||
let stream = g
|
||||
.stream()
|
||||
.into_iter()
|
||||
.map(|token| respan_token_tree(token, span))
|
||||
.collect();
|
||||
*g = Group::new(g.delimiter(), stream);
|
||||
g.set_span(span);
|
||||
}
|
||||
other => other.set_span(span),
|
||||
}
|
||||
token
|
||||
}
|
||||
|
||||
// Appends an identifier with the call-site span.
#[doc(hidden)]
pub fn push_ident(tokens: &mut TokenStream, s: &str) {
    let span = Span::call_site();
    push_ident_spanned(tokens, span, s);
}

// Appends an identifier with an explicit span; an `r#` prefix in `s`
// produces a raw identifier.
#[doc(hidden)]
pub fn push_ident_spanned(tokens: &mut TokenStream, span: Span, s: &str) {
    tokens.append(ident_maybe_raw(s, span));
}

// Appends a lifetime as a joint `'` punct followed by its name.
// NOTE(review): `&lifetime[1..]` assumes the string starts with a one-byte
// `'` character — callers (the `quote!` expansion) appear to guarantee this.
#[doc(hidden)]
pub fn push_lifetime(tokens: &mut TokenStream, lifetime: &str) {
    tokens.extend([
        TokenTree::Punct(Punct::new('\'', Spacing::Joint)),
        TokenTree::Ident(Ident::new(&lifetime[1..], Span::call_site())),
    ]);
}

// Same as `push_lifetime`, with both tokens carrying the given span.
#[doc(hidden)]
pub fn push_lifetime_spanned(tokens: &mut TokenStream, span: Span, lifetime: &str) {
    tokens.extend([
        TokenTree::Punct({
            let mut apostrophe = Punct::new('\'', Spacing::Joint);
            apostrophe.set_span(span);
            apostrophe
        }),
        TokenTree::Ident(Ident::new(&lifetime[1..], span)),
    ]);
}
|
||||
|
||||
// Generates a `$name(tokens)` / `$spanned(tokens, span)` pair of helpers for a
// punctuation operator of one, two, or three characters. All characters but
// the last use `Spacing::Joint` so the pieces lex back as a single operator.
macro_rules! push_punct {
    // Single-character operator (e.g. `+`).
    ($name:ident $spanned:ident $char1:tt) => {
        #[doc(hidden)]
        pub fn $name(tokens: &mut TokenStream) {
            tokens.append(Punct::new($char1, Spacing::Alone));
        }
        #[doc(hidden)]
        pub fn $spanned(tokens: &mut TokenStream, span: Span) {
            let mut punct = Punct::new($char1, Spacing::Alone);
            punct.set_span(span);
            tokens.append(punct);
        }
    };
    // Two-character operator (e.g. `+=`).
    ($name:ident $spanned:ident $char1:tt $char2:tt) => {
        #[doc(hidden)]
        pub fn $name(tokens: &mut TokenStream) {
            tokens.append(Punct::new($char1, Spacing::Joint));
            tokens.append(Punct::new($char2, Spacing::Alone));
        }
        #[doc(hidden)]
        pub fn $spanned(tokens: &mut TokenStream, span: Span) {
            let mut punct = Punct::new($char1, Spacing::Joint);
            punct.set_span(span);
            tokens.append(punct);
            let mut punct = Punct::new($char2, Spacing::Alone);
            punct.set_span(span);
            tokens.append(punct);
        }
    };
    // Three-character operator (e.g. `..=`).
    ($name:ident $spanned:ident $char1:tt $char2:tt $char3:tt) => {
        #[doc(hidden)]
        pub fn $name(tokens: &mut TokenStream) {
            tokens.append(Punct::new($char1, Spacing::Joint));
            tokens.append(Punct::new($char2, Spacing::Joint));
            tokens.append(Punct::new($char3, Spacing::Alone));
        }
        #[doc(hidden)]
        pub fn $spanned(tokens: &mut TokenStream, span: Span) {
            let mut punct = Punct::new($char1, Spacing::Joint);
            punct.set_span(span);
            tokens.append(punct);
            let mut punct = Punct::new($char2, Spacing::Joint);
            punct.set_span(span);
            tokens.append(punct);
            let mut punct = Punct::new($char3, Spacing::Alone);
            punct.set_span(span);
            tokens.append(punct);
        }
    };
}
|
||||
|
||||
// One `push_*` / `push_*_spanned` helper pair per punctuation token that the
// `quote!` expansion can emit.
push_punct!(push_add push_add_spanned '+');
push_punct!(push_add_eq push_add_eq_spanned '+' '=');
push_punct!(push_and push_and_spanned '&');
push_punct!(push_and_and push_and_and_spanned '&' '&');
push_punct!(push_and_eq push_and_eq_spanned '&' '=');
push_punct!(push_at push_at_spanned '@');
push_punct!(push_bang push_bang_spanned '!');
push_punct!(push_caret push_caret_spanned '^');
push_punct!(push_caret_eq push_caret_eq_spanned '^' '=');
push_punct!(push_colon push_colon_spanned ':');
push_punct!(push_colon2 push_colon2_spanned ':' ':');
push_punct!(push_comma push_comma_spanned ',');
push_punct!(push_div push_div_spanned '/');
push_punct!(push_div_eq push_div_eq_spanned '/' '=');
push_punct!(push_dot push_dot_spanned '.');
push_punct!(push_dot2 push_dot2_spanned '.' '.');
push_punct!(push_dot3 push_dot3_spanned '.' '.' '.');
push_punct!(push_dot_dot_eq push_dot_dot_eq_spanned '.' '.' '=');
push_punct!(push_eq push_eq_spanned '=');
push_punct!(push_eq_eq push_eq_eq_spanned '=' '=');
push_punct!(push_ge push_ge_spanned '>' '=');
push_punct!(push_gt push_gt_spanned '>');
push_punct!(push_le push_le_spanned '<' '=');
push_punct!(push_lt push_lt_spanned '<');
push_punct!(push_mul_eq push_mul_eq_spanned '*' '=');
push_punct!(push_ne push_ne_spanned '!' '=');
push_punct!(push_or push_or_spanned '|');
push_punct!(push_or_eq push_or_eq_spanned '|' '=');
push_punct!(push_or_or push_or_or_spanned '|' '|');
push_punct!(push_pound push_pound_spanned '#');
push_punct!(push_question push_question_spanned '?');
push_punct!(push_rarrow push_rarrow_spanned '-' '>');
push_punct!(push_larrow push_larrow_spanned '<' '-');
push_punct!(push_rem push_rem_spanned '%');
push_punct!(push_rem_eq push_rem_eq_spanned '%' '=');
push_punct!(push_fat_arrow push_fat_arrow_spanned '=' '>');
push_punct!(push_semi push_semi_spanned ';');
push_punct!(push_shl push_shl_spanned '<' '<');
push_punct!(push_shl_eq push_shl_eq_spanned '<' '<' '=');
push_punct!(push_shr push_shr_spanned '>' '>');
push_punct!(push_shr_eq push_shr_eq_spanned '>' '>' '=');
push_punct!(push_star push_star_spanned '*');
push_punct!(push_sub push_sub_spanned '-');
push_punct!(push_sub_eq push_sub_eq_spanned '-' '=');
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn push_underscore(tokens: &mut TokenStream) {
|
||||
push_underscore_spanned(tokens, Span::call_site());
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn push_underscore_spanned(tokens: &mut TokenStream, span: Span) {
|
||||
tokens.append(Ident::new("_", span));
|
||||
}
|
||||
|
||||
// Helper method for constructing identifiers from the `format_ident!` macro,
|
||||
// handling `r#` prefixes.
|
||||
#[doc(hidden)]
|
||||
pub fn mk_ident(id: &str, span: Option<Span>) -> Ident {
|
||||
let span = span.unwrap_or_else(Span::call_site);
|
||||
ident_maybe_raw(id, span)
|
||||
}
|
||||
|
||||
fn ident_maybe_raw(id: &str, span: Span) -> Ident {
|
||||
if let Some(id) = id.strip_prefix("r#") {
|
||||
Ident::new_raw(id, span)
|
||||
} else {
|
||||
Ident::new(id, span)
|
||||
}
|
||||
}
|
||||
|
||||
// Adapts from `IdentFragment` to `fmt::Display` for use by the `format_ident!`
// macro, and exposes span information from these fragments.
//
// This struct also has forwarding implementations of the formatting traits
// `Octal`, `LowerHex`, `UpperHex`, and `Binary` to allow for their use within
// `format_ident!`.
#[derive(Copy, Clone)]
#[doc(hidden)]
pub struct IdentFragmentAdapter<T: IdentFragment>(pub T);

impl<T: IdentFragment> IdentFragmentAdapter<T> {
    // Forwarded so `format_ident!` can inherit the fragment's span.
    pub fn span(&self) -> Option<Span> {
        self.0.span()
    }
}

// `Display` goes through `IdentFragment::fmt` (which strips `r#` prefixes),
// not through the inner type's own `Display` impl.
impl<T: IdentFragment> fmt::Display for IdentFragmentAdapter<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        IdentFragment::fmt(&self.0, f)
    }
}

impl<T: IdentFragment + fmt::Octal> fmt::Octal for IdentFragmentAdapter<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Octal::fmt(&self.0, f)
    }
}

impl<T: IdentFragment + fmt::LowerHex> fmt::LowerHex for IdentFragmentAdapter<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::LowerHex::fmt(&self.0, f)
    }
}

impl<T: IdentFragment + fmt::UpperHex> fmt::UpperHex for IdentFragmentAdapter<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::UpperHex::fmt(&self.0, f)
    }
}

impl<T: IdentFragment + fmt::Binary> fmt::Binary for IdentFragmentAdapter<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Binary::fmt(&self.0, f)
    }
}
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::ToTokens;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
|
||||
// Not public API other than via the syn crate. Use syn::spanned::Spanned.
pub trait Spanned: private::Sealed {
    /// Best-effort source span of `self`; see the impls below for how each
    /// supported type derives it.
    fn __span(&self) -> Span;
}
|
||||
|
||||
// A `Span` is its own span.
impl Spanned for Span {
    fn __span(&self) -> Span {
        *self
    }
}

// A `DelimSpan` collapses to a single span covering the whole group.
impl Spanned for DelimSpan {
    fn __span(&self) -> Span {
        self.join()
    }
}

// Fallback: anything interpolatable is spanned by joining the spans of all
// of its tokens (see `join_spans`).
impl<T: ?Sized + ToTokens> Spanned for T {
    fn __span(&self) -> Span {
        join_spans(self.into_token_stream())
    }
}
|
||||
|
||||
fn join_spans(tokens: TokenStream) -> Span {
|
||||
let mut iter = tokens.into_iter().map(|tt| tt.span());
|
||||
|
||||
let first = match iter.next() {
|
||||
Some(span) => span,
|
||||
None => return Span::call_site(),
|
||||
};
|
||||
|
||||
iter.fold(None, |_prev, next| Some(next))
|
||||
.and_then(|last| first.join(last))
|
||||
.unwrap_or(first)
|
||||
}
|
||||
|
||||
// Seals the `Spanned` trait: `Sealed` is not nameable outside this crate, so
// only the impls in the parent module can exist.
mod private {
    use crate::ToTokens;
    use proc_macro2::extra::DelimSpan;
    use proc_macro2::Span;

    pub trait Sealed {}
    impl Sealed for Span {}
    impl Sealed for DelimSpan {}
    impl<T: ?Sized + ToTokens> Sealed for T {}
}
|
||||
|
|
@ -0,0 +1,273 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use super::TokenStreamExt;
|
||||
use alloc::borrow::Cow;
|
||||
use alloc::rc::Rc;
|
||||
use core::iter;
|
||||
use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
|
||||
use std::ffi::{CStr, CString};
|
||||
|
||||
/// Types that can be interpolated inside a `quote!` invocation.
pub trait ToTokens {
    /// Write `self` to the given `TokenStream`.
    ///
    /// The token append methods provided by the [`TokenStreamExt`] extension
    /// trait may be useful for implementing `ToTokens`.
    ///
    /// # Example
    ///
    /// Example implementation for a struct representing Rust paths like
    /// `std::cmp::PartialEq`:
    ///
    /// ```
    /// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream};
    /// use quote::{TokenStreamExt, ToTokens};
    ///
    /// pub struct Path {
    ///     pub global: bool,
    ///     pub segments: Vec<PathSegment>,
    /// }
    ///
    /// impl ToTokens for Path {
    ///     fn to_tokens(&self, tokens: &mut TokenStream) {
    ///         for (i, segment) in self.segments.iter().enumerate() {
    ///             if i > 0 || self.global {
    ///                 // Double colon `::`
    ///                 tokens.append(Punct::new(':', Spacing::Joint));
    ///                 tokens.append(Punct::new(':', Spacing::Alone));
    ///             }
    ///             segment.to_tokens(tokens);
    ///         }
    ///     }
    /// }
    /// #
    /// # pub struct PathSegment;
    /// #
    /// # impl ToTokens for PathSegment {
    /// #     fn to_tokens(&self, tokens: &mut TokenStream) {
    /// #         unimplemented!()
    /// #     }
    /// # }
    /// ```
    fn to_tokens(&self, tokens: &mut TokenStream);

    /// Convert `self` directly into a `TokenStream` object.
    ///
    /// This method is implicitly implemented using `to_tokens`, and acts as a
    /// convenience method for consumers of the `ToTokens` trait.
    fn to_token_stream(&self) -> TokenStream {
        let mut tokens = TokenStream::new();
        self.to_tokens(&mut tokens);
        tokens
    }

    /// Convert `self` directly into a `TokenStream` object, consuming `self`.
    ///
    /// This method is implicitly implemented using `to_tokens`, and acts as a
    /// convenience method for consumers of the `ToTokens` trait.
    fn into_token_stream(self) -> TokenStream
    where
        Self: Sized,
    {
        self.to_token_stream()
    }
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> ToTokens for &T {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> ToTokens for &mut T {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> ToTokens for Rc<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens> ToTokens for Option<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if let Some(t) = self {
|
||||
t.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// `ToTokens` impls for primitives. Numeric values are emitted as *suffixed*
// literals (e.g. `1i8`, `1.0f32`) so the generated token keeps the exact type
// of the Rust value it came from.
impl ToTokens for str {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::string(self));
    }
}

impl ToTokens for String {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.as_str().to_tokens(tokens);
    }
}

impl ToTokens for i8 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::i8_suffixed(*self));
    }
}

impl ToTokens for i16 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::i16_suffixed(*self));
    }
}

impl ToTokens for i32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::i32_suffixed(*self));
    }
}

impl ToTokens for i64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::i64_suffixed(*self));
    }
}

impl ToTokens for i128 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::i128_suffixed(*self));
    }
}

impl ToTokens for isize {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::isize_suffixed(*self));
    }
}

impl ToTokens for u8 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::u8_suffixed(*self));
    }
}

impl ToTokens for u16 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::u16_suffixed(*self));
    }
}

impl ToTokens for u32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::u32_suffixed(*self));
    }
}

impl ToTokens for u64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::u64_suffixed(*self));
    }
}

impl ToTokens for u128 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::u128_suffixed(*self));
    }
}

impl ToTokens for usize {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::usize_suffixed(*self));
    }
}

impl ToTokens for f32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::f32_suffixed(*self));
    }
}

impl ToTokens for f64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::f64_suffixed(*self));
    }
}

impl ToTokens for char {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::character(*self));
    }
}

impl ToTokens for bool {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        // `true`/`false` are keywords, so they are emitted as idents rather
        // than literals.
        let word = if *self { "true" } else { "false" };
        tokens.append(Ident::new(word, Span::call_site()));
    }
}
|
||||
|
||||
// Both C string types are emitted as C string literals via
// `Literal::c_string` (`CString` derefs to `&CStr` at the call site).
impl ToTokens for CStr {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::c_string(self));
    }
}

impl ToTokens for CString {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(Literal::c_string(self));
    }
}
|
||||
|
||||
// Raw proc-macro2 token types append a clone of themselves verbatim.
impl ToTokens for Group {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(self.clone());
    }
}

impl ToTokens for Ident {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(self.clone());
    }
}

impl ToTokens for Punct {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(self.clone());
    }
}

impl ToTokens for Literal {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(self.clone());
    }
}

impl ToTokens for TokenTree {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append(self.clone());
    }
}

impl ToTokens for TokenStream {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend(iter::once(self.clone()));
    }

    // A stream converted by value is returned as-is, skipping the default
    // implementation's detour through a freshly created stream.
    fn into_token_stream(self) -> TokenStream {
        self
    }
}
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
# `syn`
|
||||
|
||||
These source files come from the Rust `syn` crate, version 2.0.106
|
||||
(released 2025-08-16), hosted in the <https://github.com/dtolnay/syn>
|
||||
repository, licensed under "Apache-2.0 OR MIT" and only modified to add
|
||||
the SPDX license identifiers and to remove the `unicode-ident`
|
||||
dependency.
|
||||
|
||||
For copyright details, please see:
|
||||
|
||||
https://github.com/dtolnay/syn/blob/2.0.106/README.md#license
|
||||
https://github.com/dtolnay/syn/blob/2.0.106/LICENSE-APACHE
|
||||
https://github.com/dtolnay/syn/blob/2.0.106/LICENSE-MIT
|
||||
|
|
@ -0,0 +1,838 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::error::Error;
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::error::Result;
|
||||
use crate::expr::Expr;
|
||||
use crate::mac::MacroDelimiter;
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::meta::{self, ParseNestedMeta};
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, ParseStream, Parser};
|
||||
use crate::path::Path;
|
||||
use crate::token;
|
||||
use proc_macro2::TokenStream;
|
||||
#[cfg(feature = "printing")]
|
||||
use std::iter;
|
||||
#[cfg(feature = "printing")]
|
||||
use std::slice;
|
||||
|
||||
ast_struct! {
    /// An attribute, like `#[repr(transparent)]`.
    ///
    /// <br>
    ///
    /// # Syntax
    ///
    /// Rust has six types of attributes.
    ///
    /// - Outer attributes like `#[repr(transparent)]`. These appear outside or
    ///   in front of the item they describe.
    ///
    /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
    ///   of the item they describe, usually a module.
    ///
    /// - Outer one-line doc comments like `/// Example`.
    ///
    /// - Inner one-line doc comments like `//! Please file an issue`.
    ///
    /// - Outer documentation blocks `/** Example */`.
    ///
    /// - Inner documentation blocks `/*! Please file an issue */`.
    ///
    /// The `style` field of type `AttrStyle` distinguishes whether an attribute
    /// is outer or inner.
    ///
    /// Every attribute has a `path` that indicates the intended interpretation
    /// of the rest of the attribute's contents. The path and the optional
    /// additional contents are represented together in the `meta` field of the
    /// attribute in three possible varieties:
    ///
    /// - Meta::Path — attributes whose information content conveys just a
    ///   path, for example the `#[test]` attribute.
    ///
    /// - Meta::List — attributes that carry arbitrary tokens after the
    ///   path, surrounded by a delimiter (parenthesis, bracket, or brace). For
    ///   example `#[derive(Copy)]` or `#[precondition(x < 5)]`.
    ///
    /// - Meta::NameValue — attributes with an `=` sign after the path,
    ///   followed by a Rust expression. For example `#[path =
    ///   "sys/windows.rs"]`.
    ///
    /// All doc comments are represented in the NameValue style with a path of
    /// "doc", as this is how they are processed by the compiler and by
    /// `macro_rules!` macros.
    ///
    /// ```text
    /// #[derive(Copy, Clone)]
    ///   ~~~~~~Path
    ///   ^^^^^^^^^^^^^^^^^^^Meta::List
    ///
    /// #[path = "sys/windows.rs"]
    ///   ~~~~Path
    ///   ^^^^^^^^^^^^^^^^^^^^^^^Meta::NameValue
    ///
    /// #[test]
    ///   ^^^^Meta::Path
    /// ```
    ///
    /// <br>
    ///
    /// # Parsing from tokens to Attribute
    ///
    /// This type does not implement the [`Parse`] trait and thus cannot be
    /// parsed directly by [`ParseStream::parse`]. Instead use
    /// [`ParseStream::call`] with one of the two parser functions
    /// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on
    /// which you intend to parse.
    ///
    /// [`Parse`]: crate::parse::Parse
    /// [`ParseStream::parse`]: crate::parse::ParseBuffer::parse
    /// [`ParseStream::call`]: crate::parse::ParseBuffer::call
    ///
    /// ```
    /// use syn::{Attribute, Ident, Result, Token};
    /// use syn::parse::{Parse, ParseStream};
    ///
    /// // Parses a unit struct with attributes.
    /// //
    /// //     #[path = "s.tmpl"]
    /// //     struct S;
    /// struct UnitStruct {
    ///     attrs: Vec<Attribute>,
    ///     struct_token: Token![struct],
    ///     name: Ident,
    ///     semi_token: Token![;],
    /// }
    ///
    /// impl Parse for UnitStruct {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         Ok(UnitStruct {
    ///             attrs: input.call(Attribute::parse_outer)?,
    ///             struct_token: input.parse()?,
    ///             name: input.parse()?,
    ///             semi_token: input.parse()?,
    ///         })
    ///     }
    /// }
    /// ```
    ///
    /// <p><br></p>
    ///
    /// # Parsing from Attribute to structured arguments
    ///
    /// The grammar of attributes in Rust is very flexible, which makes the
    /// syntax tree not that useful on its own. In particular, arguments of the
    /// `Meta::List` variety of attribute are held in an arbitrary `tokens:
    /// TokenStream`. Macros are expected to check the `path` of the attribute,
    /// decide whether they recognize it, and then parse the remaining tokens
    /// according to whatever grammar they wish to require for that kind of
    /// attribute. Use [`parse_args()`] to parse those tokens into the expected
    /// data structure.
    ///
    /// [`parse_args()`]: Attribute::parse_args
    ///
    /// <p><br></p>
    ///
    /// # Doc comments
    ///
    /// The compiler transforms doc comments, such as `/// comment` and `/*!
    /// comment */`, into attributes before macros are expanded. Each comment is
    /// expanded into an attribute of the form `#[doc = r"comment"]`.
    ///
    /// As an example, the following `mod` items are expanded identically:
    ///
    /// ```
    /// # use syn::{ItemMod, parse_quote};
    /// let doc: ItemMod = parse_quote! {
    ///     /// Single line doc comments
    ///     /// We write so many!
    ///     /**
    ///      * Multi-line comments...
    ///      * May span many lines
    ///      */
    ///     mod example {
    ///         //! Of course, they can be inner too
    ///         /*! And fit in a single line */
    ///     }
    /// };
    /// let attr: ItemMod = parse_quote! {
    ///     #[doc = r" Single line doc comments"]
    ///     #[doc = r" We write so many!"]
    ///     #[doc = r"
    ///      * Multi-line comments...
    ///      * May span many lines
    ///      "]
    ///     mod example {
    ///         #![doc = r" Of course, they can be inner too"]
    ///         #![doc = r" And fit in a single line "]
    ///     }
    /// };
    /// assert_eq!(doc, attr);
    /// ```
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Attribute {
        pub pound_token: Token![#],
        // Outer `#[...]` vs inner `#![...]`.
        pub style: AttrStyle,
        pub bracket_token: token::Bracket,
        // The path plus optional arguments between the brackets.
        pub meta: Meta,
    }
}
|
||||
|
||||
impl Attribute {
    /// Returns the path that identifies the interpretation of this attribute.
    ///
    /// For example this would return the `test` in `#[test]`, the `derive` in
    /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
    pub fn path(&self) -> &Path {
        self.meta.path()
    }

    /// Parse the arguments to the attribute as a syntax tree.
    ///
    /// This is similar to pulling out the `TokenStream` from `Meta::List` and
    /// doing `syn::parse2::<T>(meta_list.tokens)`, except that using
    /// `parse_args` the error message has a more useful span when `tokens` is
    /// empty.
    ///
    /// The surrounding delimiters are *not* included in the input to the
    /// parser.
    ///
    /// ```text
    /// #[my_attr(value < 5)]
    ///           ^^^^^^^^^ what gets parsed
    /// ```
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{parse_quote, Attribute, Expr};
    ///
    /// let attr: Attribute = parse_quote! {
    ///     #[precondition(value < 5)]
    /// };
    ///
    /// if attr.path().is_ident("precondition") {
    ///     let precondition: Expr = attr.parse_args()?;
    ///     // ...
    /// }
    /// # anyhow::Ok(())
    /// ```
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_args<T: Parse>(&self) -> Result<T> {
        self.parse_args_with(T::parse)
    }

    /// Parse the arguments to the attribute using the given parser.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{parse_quote, Attribute};
    ///
    /// let attr: Attribute = parse_quote! {
    ///     #[inception { #[brrrrrrraaaaawwwwrwrrrmrmrmmrmrmmmmm] }]
    /// };
    ///
    /// let bwom = attr.parse_args_with(Attribute::parse_outer)?;
    ///
    /// // Attribute does not have a Parse impl, so we couldn't directly do:
    /// //     let bwom: Attribute = attr.parse_args()?;
    /// # anyhow::Ok(())
    /// ```
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
        // Only `Meta::List` has arguments; the other two variants produce an
        // error that tells the user what a list-style attribute looks like.
        match &self.meta {
            // Span the error across the whole path, first to last segment.
            Meta::Path(path) => Err(crate::error::new2(
                path.segments.first().unwrap().ident.span(),
                path.segments.last().unwrap().ident.span(),
                format!(
                    "expected attribute arguments in parentheses: {}[{}(...)]",
                    parsing::DisplayAttrStyle(&self.style),
                    parsing::DisplayPath(path),
                ),
            )),
            Meta::NameValue(meta) => Err(Error::new(
                meta.eq_token.span,
                format_args!(
                    "expected parentheses: {}[{}(...)]",
                    parsing::DisplayAttrStyle(&self.style),
                    parsing::DisplayPath(&meta.path),
                ),
            )),
            Meta::List(meta) => meta.parse_args_with(parser),
        }
    }

    /// Parse the arguments to the attribute, expecting it to follow the
    /// conventional structure used by most of Rust's built-in attributes.
    ///
    /// The [*Meta Item Attribute Syntax*][syntax] section in the Rust reference
    /// explains the convention in more detail. Not all attributes follow this
    /// convention, so [`parse_args()`][Self::parse_args] is available if you
    /// need to parse arbitrarily goofy attribute syntax.
    ///
    /// [syntax]: https://doc.rust-lang.org/reference/attributes.html#meta-item-attribute-syntax
    ///
    /// # Example
    ///
    /// We'll parse a struct, and then parse some of Rust's `#[repr]` attribute
    /// syntax.
    ///
    /// ```
    /// use syn::{parenthesized, parse_quote, token, ItemStruct, LitInt};
    ///
    /// let input: ItemStruct = parse_quote! {
    ///     #[repr(C, align(4))]
    ///     pub struct MyStruct(u16, u32);
    /// };
    ///
    /// let mut repr_c = false;
    /// let mut repr_transparent = false;
    /// let mut repr_align = None::<usize>;
    /// let mut repr_packed = None::<usize>;
    /// for attr in &input.attrs {
    ///     if attr.path().is_ident("repr") {
    ///         attr.parse_nested_meta(|meta| {
    ///             // #[repr(C)]
    ///             if meta.path.is_ident("C") {
    ///                 repr_c = true;
    ///                 return Ok(());
    ///             }
    ///
    ///             // #[repr(transparent)]
    ///             if meta.path.is_ident("transparent") {
    ///                 repr_transparent = true;
    ///                 return Ok(());
    ///             }
    ///
    ///             // #[repr(align(N))]
    ///             if meta.path.is_ident("align") {
    ///                 let content;
    ///                 parenthesized!(content in meta.input);
    ///                 let lit: LitInt = content.parse()?;
    ///                 let n: usize = lit.base10_parse()?;
    ///                 repr_align = Some(n);
    ///                 return Ok(());
    ///             }
    ///
    ///             // #[repr(packed)] or #[repr(packed(N))], omitted N means 1
    ///             if meta.path.is_ident("packed") {
    ///                 if meta.input.peek(token::Paren) {
    ///                     let content;
    ///                     parenthesized!(content in meta.input);
    ///                     let lit: LitInt = content.parse()?;
    ///                     let n: usize = lit.base10_parse()?;
    ///                     repr_packed = Some(n);
    ///                 } else {
    ///                     repr_packed = Some(1);
    ///                 }
    ///                 return Ok(());
    ///             }
    ///
    ///             Err(meta.error("unrecognized repr"))
    ///         })?;
    ///     }
    /// }
    /// # anyhow::Ok(())
    /// ```
    ///
    /// # Alternatives
    ///
    /// In some cases, for attributes which have nested layers of structured
    /// content, the following less flexible approach might be more convenient:
    ///
    /// ```
    /// # use syn::{parse_quote, ItemStruct};
    /// #
    /// # let input: ItemStruct = parse_quote! {
    /// #     #[repr(C, align(4))]
    /// #     pub struct MyStruct(u16, u32);
    /// # };
    /// #
    /// use syn::punctuated::Punctuated;
    /// use syn::{parenthesized, token, Error, LitInt, Meta, Token};
    ///
    /// let mut repr_c = false;
    /// let mut repr_transparent = false;
    /// let mut repr_align = None::<usize>;
    /// let mut repr_packed = None::<usize>;
    /// for attr in &input.attrs {
    ///     if attr.path().is_ident("repr") {
    ///         let nested = attr.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated)?;
    ///         for meta in nested {
    ///             match meta {
    ///                 // #[repr(C)]
    ///                 Meta::Path(path) if path.is_ident("C") => {
    ///                     repr_c = true;
    ///                 }
    ///
    ///                 // #[repr(align(N))]
    ///                 Meta::List(meta) if meta.path.is_ident("align") => {
    ///                     let lit: LitInt = meta.parse_args()?;
    ///                     let n: usize = lit.base10_parse()?;
    ///                     repr_align = Some(n);
    ///                 }
    ///
    ///                 /* ... */
    ///
    ///                 _ => {
    ///                     return Err(Error::new_spanned(meta, "unrecognized repr"));
    ///                 }
    ///             }
    ///         }
    ///     }
    /// }
    /// # Ok(())
    /// ```
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_nested_meta(
        &self,
        logic: impl FnMut(ParseNestedMeta) -> Result<()>,
    ) -> Result<()> {
        self.parse_args_with(meta::parser(logic))
    }

    /// Parses zero or more outer attributes from the stream.
    ///
    /// # Example
    ///
    /// See
    /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute).
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
        let mut attrs = Vec::new();
        // Each outer attribute begins with `#`; stop at the first token that
        // does not.
        while input.peek(Token![#]) {
            attrs.push(input.call(parsing::single_parse_outer)?);
        }
        Ok(attrs)
    }

    /// Parses zero or more inner attributes from the stream.
    ///
    /// # Example
    ///
    /// See
    /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute).
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
        let mut attrs = Vec::new();
        parsing::parse_inner(input, &mut attrs)?;
        Ok(attrs)
    }
}
|
||||
|
||||
ast_enum! {
    /// Distinguishes between attributes that decorate an item and attributes
    /// that are contained within an item.
    ///
    /// # Outer attributes
    ///
    /// - `#[repr(transparent)]`
    /// - `/// # Example`
    /// - `/** Please file an issue */`
    ///
    /// # Inner attributes
    ///
    /// - `#![feature(proc_macro)]`
    /// - `//! # Example`
    /// - `/*! Please file an issue */`
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum AttrStyle {
        Outer,
        // Carries the `!` token that follows the `#` of an inner attribute.
        Inner(Token![!]),
    }
}
|
||||
|
||||
ast_enum! {
    /// Content of a compile-time structured attribute.
    ///
    /// ## Path
    ///
    /// A meta path is like the `test` in `#[test]`.
    ///
    /// ## List
    ///
    /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`.
    ///
    /// ## NameValue
    ///
    /// A name-value meta is like the `path = "..."` in `#[path =
    /// "sys/windows.rs"]`.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum Meta {
        Path(Path),

        /// A structured list within an attribute, like `derive(Copy, Clone)`.
        List(MetaList),

        /// A name-value pair within an attribute, like `feature = "nightly"`.
        NameValue(MetaNameValue),
    }
}
|
||||
|
||||
ast_struct! {
    /// A structured list within an attribute, like `derive(Copy, Clone)`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct MetaList {
        pub path: Path,
        // Parenthesis, bracket, or brace surrounding `tokens`.
        pub delimiter: MacroDelimiter,
        // The arguments inside the delimiter, left unparsed for the consumer.
        pub tokens: TokenStream,
    }
}
|
||||
|
||||
ast_struct! {
    /// A name-value pair within an attribute, like `feature = "nightly"`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct MetaNameValue {
        pub path: Path,
        pub eq_token: Token![=],
        // The expression to the right of `=`.
        pub value: Expr,
    }
}
|
||||
|
||||
impl Meta {
    /// Returns the path that begins this structured meta item.
    ///
    /// For example this would return the `test` in `#[test]`, the `derive` in
    /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
    pub fn path(&self) -> &Path {
        match self {
            Meta::Path(path) => path,
            Meta::List(meta) => &meta.path,
            Meta::NameValue(meta) => &meta.path,
        }
    }

    /// Error if this is a `Meta::List` or `Meta::NameValue`.
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn require_path_only(&self) -> Result<&Path> {
        // Point the error at the first unexpected token: the opening
        // delimiter for a list, or the `=` for a name-value.
        let error_span = match self {
            Meta::Path(path) => return Ok(path),
            Meta::List(meta) => meta.delimiter.span().open(),
            Meta::NameValue(meta) => meta.eq_token.span,
        };
        Err(Error::new(error_span, "unexpected token in attribute"))
    }

    /// Error if this is a `Meta::Path` or `Meta::NameValue`.
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn require_list(&self) -> Result<&MetaList> {
        match self {
            Meta::List(meta) => Ok(meta),
            // Span the error across the whole path, first to last segment.
            Meta::Path(path) => Err(crate::error::new2(
                path.segments.first().unwrap().ident.span(),
                path.segments.last().unwrap().ident.span(),
                format!(
                    "expected attribute arguments in parentheses: `{}(...)`",
                    parsing::DisplayPath(path),
                ),
            )),
            Meta::NameValue(meta) => Err(Error::new(meta.eq_token.span, "expected `(`")),
        }
    }

    /// Error if this is a `Meta::Path` or `Meta::List`.
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn require_name_value(&self) -> Result<&MetaNameValue> {
        match self {
            Meta::NameValue(meta) => Ok(meta),
            Meta::Path(path) => Err(crate::error::new2(
                path.segments.first().unwrap().ident.span(),
                path.segments.last().unwrap().ident.span(),
                format!(
                    "expected a value for this attribute: `{} = ...`",
                    parsing::DisplayPath(path),
                ),
            )),
            Meta::List(meta) => Err(Error::new(meta.delimiter.span().open(), "expected `=`")),
        }
    }
}
|
||||
|
||||
impl MetaList {
    /// See [`Attribute::parse_args`].
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_args<T: Parse>(&self) -> Result<T> {
        self.parse_args_with(T::parse)
    }

    /// See [`Attribute::parse_args_with`].
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
        // Use the closing delimiter as the scope so "unexpected end of input"
        // errors point at the end of the argument list, not call_site.
        let scope = self.delimiter.span().close();
        crate::parse::parse_scoped(parser, scope, self.tokens.clone())
    }

    /// See [`Attribute::parse_nested_meta`].
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_nested_meta(
        &self,
        logic: impl FnMut(ParseNestedMeta) -> Result<()>,
    ) -> Result<()> {
        self.parse_args_with(meta::parser(logic))
    }
}
|
||||
|
||||
// Internal helper for printing: filters a slice of attributes down to only
// the outer or only the inner ones.
#[cfg(feature = "printing")]
pub(crate) trait FilterAttrs<'a> {
    /// Concrete iterator type returned by both filter methods.
    type Ret: Iterator<Item = &'a Attribute>;

    /// Iterates only the outer (`#[...]`) attributes.
    fn outer(self) -> Self::Ret;
    /// Iterates only the inner (`#![...]`) attributes.
    #[cfg(feature = "full")]
    fn inner(self) -> Self::Ret;
}
|
||||
|
||||
#[cfg(feature = "printing")]
impl<'a> FilterAttrs<'a> for &'a [Attribute] {
    // A plain fn-pointer predicate keeps `Ret` a nameable (non-closure) type.
    type Ret = iter::Filter<slice::Iter<'a, Attribute>, fn(&&Attribute) -> bool>;

    fn outer(self) -> Self::Ret {
        fn is_outer(attr: &&Attribute) -> bool {
            match attr.style {
                AttrStyle::Outer => true,
                AttrStyle::Inner(_) => false,
            }
        }
        self.iter().filter(is_outer)
    }

    #[cfg(feature = "full")]
    fn inner(self) -> Self::Ret {
        fn is_inner(attr: &&Attribute) -> bool {
            match attr.style {
                AttrStyle::Inner(_) => true,
                AttrStyle::Outer => false,
            }
        }
        self.iter().filter(is_inner)
    }
}
|
||||
|
||||
// Ergonomic conversions from each meta variant type into `Meta`.
impl From<Path> for Meta {
    fn from(meta: Path) -> Meta {
        Meta::Path(meta)
    }
}

impl From<MetaList> for Meta {
    fn from(meta: MetaList) -> Meta {
        Meta::List(meta)
    }
}

impl From<MetaNameValue> for Meta {
    fn from(meta: MetaNameValue) -> Meta {
        Meta::NameValue(meta)
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use crate::attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue};
    use crate::error::Result;
    use crate::expr::{Expr, ExprLit};
    use crate::lit::Lit;
    use crate::parse::discouraged::Speculative as _;
    use crate::parse::{Parse, ParseStream};
    use crate::path::Path;
    use crate::{mac, token};
    use proc_macro2::Ident;
    use std::fmt::{self, Display};

    /// Appends all leading inner attributes (`#![...]`) from `input` to
    /// `attrs`.
    pub(crate) fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> {
        // `#` followed by `!` is what distinguishes an inner attribute from
        // an outer one.
        while input.peek(Token![#]) && input.peek2(Token![!]) {
            attrs.push(input.call(single_parse_inner)?);
        }
        Ok(())
    }

    /// Parses a single inner attribute: `#![meta]`.
    pub(crate) fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
        let content;
        Ok(Attribute {
            pound_token: input.parse()?,
            style: AttrStyle::Inner(input.parse()?),
            bracket_token: bracketed!(content in input),
            meta: content.parse()?,
        })
    }

    /// Parses a single outer attribute: `#[meta]`.
    pub(crate) fn single_parse_outer(input: ParseStream) -> Result<Attribute> {
        let content;
        Ok(Attribute {
            pound_token: input.parse()?,
            style: AttrStyle::Outer,
            bracket_token: bracketed!(content in input),
            meta: content.parse()?,
        })
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for Meta {
        fn parse(input: ParseStream) -> Result<Self> {
            let path = parse_outermost_meta_path(input)?;
            parse_meta_after_path(path, input)
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for MetaList {
        fn parse(input: ParseStream) -> Result<Self> {
            let path = parse_outermost_meta_path(input)?;
            parse_meta_list_after_path(path, input)
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for MetaNameValue {
        fn parse(input: ParseStream) -> Result<Self> {
            let path = parse_outermost_meta_path(input)?;
            parse_meta_name_value_after_path(path, input)
        }
    }

    // Unlike meta::parse_meta_path which accepts arbitrary keywords in the path,
    // only the `unsafe` keyword is accepted as an attribute's outermost path.
    fn parse_outermost_meta_path(input: ParseStream) -> Result<Path> {
        if input.peek(Token![unsafe]) {
            let unsafe_token: Token![unsafe] = input.parse()?;
            Ok(Path::from(Ident::new("unsafe", unsafe_token.span)))
        } else {
            Path::parse_mod_style(input)
        }
    }

    /// Dispatches on the token after the path: a delimiter begins a
    /// `Meta::List`, `=` begins a `Meta::NameValue`, and anything else leaves
    /// a bare `Meta::Path`.
    pub(crate) fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> {
        if input.peek(token::Paren) || input.peek(token::Bracket) || input.peek(token::Brace) {
            parse_meta_list_after_path(path, input).map(Meta::List)
        } else if input.peek(Token![=]) {
            parse_meta_name_value_after_path(path, input).map(Meta::NameValue)
        } else {
            Ok(Meta::Path(path))
        }
    }

    fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result<MetaList> {
        let (delimiter, tokens) = mac::parse_delimiter(input)?;
        Ok(MetaList {
            path,
            delimiter,
            tokens,
        })
    }

    fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result<MetaNameValue> {
        let eq_token: Token![=] = input.parse()?;
        // Speculatively try the value as a lone literal first, committing
        // (advance_to) only if the literal consumed the whole remaining input;
        // otherwise fall back to parsing a full expression.
        let ahead = input.fork();
        let lit: Option<Lit> = ahead.parse()?;
        let value = if let (Some(lit), true) = (lit, ahead.is_empty()) {
            input.advance_to(&ahead);
            Expr::Lit(ExprLit {
                attrs: Vec::new(),
                lit,
            })
        } else if input.peek(Token![#]) && input.peek2(token::Bracket) {
            return Err(input.error("unexpected attribute inside of attribute"));
        } else {
            input.parse()?
        };
        Ok(MetaNameValue {
            path,
            eq_token,
            value,
        })
    }

    /// Renders an `AttrStyle` as its source prefix (`#` for outer, `#!` for
    /// inner), for use in error messages.
    pub(super) struct DisplayAttrStyle<'a>(pub &'a AttrStyle);

    impl<'a> Display for DisplayAttrStyle<'a> {
        fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            formatter.write_str(match self.0 {
                AttrStyle::Outer => "#",
                AttrStyle::Inner(_) => "#!",
            })
        }
    }

    /// Renders a `Path` as `seg::seg::seg` (with a leading `::` if present),
    /// for use in error messages.
    pub(super) struct DisplayPath<'a>(pub &'a Path);

    impl<'a> Display for DisplayPath<'a> {
        fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
            for (i, segment) in self.0.segments.iter().enumerate() {
                if i > 0 || self.0.leading_colon.is_some() {
                    formatter.write_str("::")?;
                }
                write!(formatter, "{}", segment.ident)?;
            }
            Ok(())
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use crate::attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue};
    use crate::path;
    use crate::path::printing::PathStyle;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Attribute {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.pound_token.to_tokens(tokens);
            // Inner attributes carry a `!` between the `#` and the brackets.
            if let AttrStyle::Inner(b) = &self.style {
                b.to_tokens(tokens);
            }
            self.bracket_token.surround(tokens, |tokens| {
                self.meta.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Meta {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            match self {
                // Attribute paths are printed in mod style.
                Meta::Path(path) => path::printing::print_path(tokens, path, PathStyle::Mod),
                Meta::List(meta_list) => meta_list.to_tokens(tokens),
                Meta::NameValue(meta_name_value) => meta_name_value.to_tokens(tokens),
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for MetaList {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            path::printing::print_path(tokens, &self.path, PathStyle::Mod);
            self.delimiter.surround(tokens, self.tokens.clone());
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for MetaNameValue {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            path::printing::print_path(tokens, &self.path, PathStyle::Mod);
            self.eq_token.to_tokens(tokens);
            self.value.to_tokens(tokens);
        }
    }
}
|
||||
|
|
@ -0,0 +1,68 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use std::ops::{AddAssign, MulAssign};
|
||||
|
||||
// For implementing base10_digits() accessor on LitInt.
|
||||
pub(crate) struct BigInt {
|
||||
digits: Vec<u8>,
|
||||
}
|
||||
|
||||
impl BigInt {
|
||||
pub(crate) fn new() -> Self {
|
||||
BigInt { digits: Vec::new() }
|
||||
}
|
||||
|
||||
pub(crate) fn to_string(&self) -> String {
|
||||
let mut repr = String::with_capacity(self.digits.len());
|
||||
|
||||
let mut has_nonzero = false;
|
||||
for digit in self.digits.iter().rev() {
|
||||
has_nonzero |= *digit != 0;
|
||||
if has_nonzero {
|
||||
repr.push((*digit + b'0') as char);
|
||||
}
|
||||
}
|
||||
|
||||
if repr.is_empty() {
|
||||
repr.push('0');
|
||||
}
|
||||
|
||||
repr
|
||||
}
|
||||
|
||||
fn reserve_two_digits(&mut self) {
|
||||
let len = self.digits.len();
|
||||
let desired =
|
||||
len + !self.digits.ends_with(&[0, 0]) as usize + !self.digits.ends_with(&[0]) as usize;
|
||||
self.digits.resize(desired, 0);
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<u8> for BigInt {
|
||||
// Assumes increment <16.
|
||||
fn add_assign(&mut self, mut increment: u8) {
|
||||
self.reserve_two_digits();
|
||||
|
||||
let mut i = 0;
|
||||
while increment > 0 {
|
||||
let sum = self.digits[i] + increment;
|
||||
self.digits[i] = sum % 10;
|
||||
increment = sum / 10;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MulAssign<u8> for BigInt {
|
||||
// Assumes base <=16.
|
||||
fn mul_assign(&mut self, base: u8) {
|
||||
self.reserve_two_digits();
|
||||
|
||||
let mut carry = 0;
|
||||
for digit in &mut self.digits {
|
||||
let prod = *digit * base + carry;
|
||||
*digit = prod % 10;
|
||||
carry = prod / 10;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,436 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
//! A stably addressed token buffer supporting efficient traversal based on a
|
||||
//! cheaply copyable cursor.
|
||||
|
||||
// This module is heavily commented as it contains most of the unsafe code in
|
||||
// Syn, and caution should be used when editing it. The public-facing interface
|
||||
// is 100% safe but the implementation is fragile internally.
|
||||
|
||||
use crate::Lifetime;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
use std::cmp::Ordering;
|
||||
use std::marker::PhantomData;
|
||||
use std::ptr;
|
||||
|
||||
/// Internal type which is used instead of `TokenTree` to represent a token tree
|
||||
/// within a `TokenBuffer`.
|
||||
enum Entry {
|
||||
// Mimicking types from proc-macro.
|
||||
// Group entries contain the offset to the matching End entry.
|
||||
Group(Group, usize),
|
||||
Ident(Ident),
|
||||
Punct(Punct),
|
||||
Literal(Literal),
|
||||
// End entries contain the offset (negative) to the start of the buffer, and
|
||||
// offset (negative) to the matching Group entry.
|
||||
End(isize, isize),
|
||||
}
|
||||
|
||||
/// A buffer that can be efficiently traversed multiple times, unlike
|
||||
/// `TokenStream` which requires a deep copy in order to traverse more than
|
||||
/// once.
|
||||
pub struct TokenBuffer {
|
||||
// NOTE: Do not implement clone on this - while the current design could be
|
||||
// cloned, other designs which could be desirable may not be cloneable.
|
||||
entries: Box<[Entry]>,
|
||||
}
|
||||
|
||||
impl TokenBuffer {
|
||||
fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) {
|
||||
for tt in stream {
|
||||
match tt {
|
||||
TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)),
|
||||
TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)),
|
||||
TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)),
|
||||
TokenTree::Group(group) => {
|
||||
let group_start_index = entries.len();
|
||||
entries.push(Entry::End(0, 0)); // we replace this below
|
||||
Self::recursive_new(entries, group.stream());
|
||||
let group_end_index = entries.len();
|
||||
let group_offset = group_end_index - group_start_index;
|
||||
entries.push(Entry::End(
|
||||
-(group_end_index as isize),
|
||||
-(group_offset as isize),
|
||||
));
|
||||
entries[group_start_index] = Entry::Group(group, group_offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a `TokenBuffer` containing all the tokens from the input
|
||||
/// `proc_macro::TokenStream`.
|
||||
#[cfg(feature = "proc-macro")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))]
|
||||
pub fn new(stream: proc_macro::TokenStream) -> Self {
|
||||
Self::new2(stream.into())
|
||||
}
|
||||
|
||||
/// Creates a `TokenBuffer` containing all the tokens from the input
|
||||
/// `proc_macro2::TokenStream`.
|
||||
pub fn new2(stream: TokenStream) -> Self {
|
||||
let mut entries = Vec::new();
|
||||
Self::recursive_new(&mut entries, stream);
|
||||
entries.push(Entry::End(-(entries.len() as isize), 0));
|
||||
Self {
|
||||
entries: entries.into_boxed_slice(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a cursor referencing the first token in the buffer and able to
|
||||
/// traverse until the end of the buffer.
|
||||
pub fn begin(&self) -> Cursor {
|
||||
let ptr = self.entries.as_ptr();
|
||||
unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) }
|
||||
}
|
||||
}
|
||||
|
||||
/// A cheaply copyable cursor into a `TokenBuffer`.
|
||||
///
|
||||
/// This cursor holds a shared reference into the immutable data which is used
|
||||
/// internally to represent a `TokenStream`, and can be efficiently manipulated
|
||||
/// and copied around.
|
||||
///
|
||||
/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
|
||||
/// object and get a cursor to its first token with `begin()`.
|
||||
pub struct Cursor<'a> {
|
||||
// The current entry which the `Cursor` is pointing at.
|
||||
ptr: *const Entry,
|
||||
// This is the only `Entry::End` object which this cursor is allowed to
|
||||
// point at. All other `End` objects are skipped over in `Cursor::create`.
|
||||
scope: *const Entry,
|
||||
// Cursor is covariant in 'a. This field ensures that our pointers are still
|
||||
// valid.
|
||||
marker: PhantomData<&'a Entry>,
|
||||
}
|
||||
|
||||
impl<'a> Cursor<'a> {
|
||||
/// Creates a cursor referencing a static empty TokenStream.
|
||||
pub fn empty() -> Self {
|
||||
// It's safe in this situation for us to put an `Entry` object in global
|
||||
// storage, despite it not actually being safe to send across threads
|
||||
// (`Ident` is a reference into a thread-local table). This is because
|
||||
// this entry never includes a `Ident` object.
|
||||
//
|
||||
// This wrapper struct allows us to break the rules and put a `Sync`
|
||||
// object in global storage.
|
||||
struct UnsafeSyncEntry(Entry);
|
||||
unsafe impl Sync for UnsafeSyncEntry {}
|
||||
static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0, 0));
|
||||
|
||||
Cursor {
|
||||
ptr: &EMPTY_ENTRY.0,
|
||||
scope: &EMPTY_ENTRY.0,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// This create method intelligently exits non-explicitly-entered
|
||||
/// `None`-delimited scopes when the cursor reaches the end of them,
|
||||
/// allowing for them to be treated transparently.
|
||||
unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self {
|
||||
// NOTE: If we're looking at a `End`, we want to advance the cursor
|
||||
// past it, unless `ptr == scope`, which means that we're at the edge of
|
||||
// our cursor's scope. We should only have `ptr != scope` at the exit
|
||||
// from None-delimited groups entered with `ignore_none`.
|
||||
while let Entry::End(..) = unsafe { &*ptr } {
|
||||
if ptr::eq(ptr, scope) {
|
||||
break;
|
||||
}
|
||||
ptr = unsafe { ptr.add(1) };
|
||||
}
|
||||
|
||||
Cursor {
|
||||
ptr,
|
||||
scope,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the current entry.
|
||||
fn entry(self) -> &'a Entry {
|
||||
unsafe { &*self.ptr }
|
||||
}
|
||||
|
||||
/// Bump the cursor to point at the next token after the current one. This
|
||||
/// is undefined behavior if the cursor is currently looking at an
|
||||
/// `Entry::End`.
|
||||
///
|
||||
/// If the cursor is looking at an `Entry::Group`, the bumped cursor will
|
||||
/// point at the first token in the group (with the same scope end).
|
||||
unsafe fn bump_ignore_group(self) -> Cursor<'a> {
|
||||
unsafe { Cursor::create(self.ptr.offset(1), self.scope) }
|
||||
}
|
||||
|
||||
/// While the cursor is looking at a `None`-delimited group, move it to look
|
||||
/// at the first token inside instead. If the group is empty, this will move
|
||||
/// the cursor past the `None`-delimited group.
|
||||
///
|
||||
/// WARNING: This mutates its argument.
|
||||
fn ignore_none(&mut self) {
|
||||
while let Entry::Group(group, _) = self.entry() {
|
||||
if group.delimiter() == Delimiter::None {
|
||||
unsafe { *self = self.bump_ignore_group() };
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks whether the cursor is currently pointing at the end of its valid
|
||||
/// scope.
|
||||
pub fn eof(self) -> bool {
|
||||
// We're at eof if we're at the end of our scope.
|
||||
ptr::eq(self.ptr, self.scope)
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at a `Ident`, returns it along with a cursor
|
||||
/// pointing at the next `TokenTree`.
|
||||
pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
|
||||
self.ignore_none();
|
||||
match self.entry() {
|
||||
Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at a `Punct`, returns it along with a cursor
|
||||
/// pointing at the next `TokenTree`.
|
||||
pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
|
||||
self.ignore_none();
|
||||
match self.entry() {
|
||||
Entry::Punct(punct) if punct.as_char() != '\'' => {
|
||||
Some((punct.clone(), unsafe { self.bump_ignore_group() }))
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at a `Literal`, return it along with a cursor
|
||||
/// pointing at the next `TokenTree`.
|
||||
pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
|
||||
self.ignore_none();
|
||||
match self.entry() {
|
||||
Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at a `Lifetime`, returns it along with a
|
||||
/// cursor pointing at the next `TokenTree`.
|
||||
pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> {
|
||||
self.ignore_none();
|
||||
match self.entry() {
|
||||
Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
|
||||
let next = unsafe { self.bump_ignore_group() };
|
||||
let (ident, rest) = next.ident()?;
|
||||
let lifetime = Lifetime {
|
||||
apostrophe: punct.span(),
|
||||
ident,
|
||||
};
|
||||
Some((lifetime, rest))
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at a `Group` with the given delimiter, returns
|
||||
/// a cursor into that group and one pointing to the next `TokenTree`.
|
||||
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
|
||||
// If we're not trying to enter a none-delimited group, we want to
|
||||
// ignore them. We have to make sure to _not_ ignore them when we want
|
||||
// to enter them, of course. For obvious reasons.
|
||||
if delim != Delimiter::None {
|
||||
self.ignore_none();
|
||||
}
|
||||
|
||||
if let Entry::Group(group, end_offset) = self.entry() {
|
||||
if group.delimiter() == delim {
|
||||
let span = group.delim_span();
|
||||
let end_of_group = unsafe { self.ptr.add(*end_offset) };
|
||||
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
|
||||
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
|
||||
return Some((inside_of_group, span, after_group));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at a `Group`, returns a cursor into the group
|
||||
/// and one pointing to the next `TokenTree`.
|
||||
pub fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
|
||||
if let Entry::Group(group, end_offset) = self.entry() {
|
||||
let delimiter = group.delimiter();
|
||||
let span = group.delim_span();
|
||||
let end_of_group = unsafe { self.ptr.add(*end_offset) };
|
||||
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
|
||||
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
|
||||
return Some((inside_of_group, delimiter, span, after_group));
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
|
||||
if let Entry::Group(group, end_offset) = self.entry() {
|
||||
let end_of_group = unsafe { self.ptr.add(*end_offset) };
|
||||
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
|
||||
return Some((group.clone(), after_group));
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Copies all remaining tokens visible from this cursor into a
|
||||
/// `TokenStream`.
|
||||
pub fn token_stream(self) -> TokenStream {
|
||||
let mut tts = Vec::new();
|
||||
let mut cursor = self;
|
||||
while let Some((tt, rest)) = cursor.token_tree() {
|
||||
tts.push(tt);
|
||||
cursor = rest;
|
||||
}
|
||||
tts.into_iter().collect()
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at a `TokenTree`, returns it along with a
|
||||
/// cursor pointing at the next `TokenTree`.
|
||||
///
|
||||
/// Returns `None` if the cursor has reached the end of its stream.
|
||||
///
|
||||
/// This method does not treat `None`-delimited groups as transparent, and
|
||||
/// will return a `Group(None, ..)` if the cursor is looking at one.
|
||||
pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
|
||||
let (tree, len) = match self.entry() {
|
||||
Entry::Group(group, end_offset) => (group.clone().into(), *end_offset),
|
||||
Entry::Literal(literal) => (literal.clone().into(), 1),
|
||||
Entry::Ident(ident) => (ident.clone().into(), 1),
|
||||
Entry::Punct(punct) => (punct.clone().into(), 1),
|
||||
Entry::End(..) => return None,
|
||||
};
|
||||
|
||||
let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) };
|
||||
Some((tree, rest))
|
||||
}
|
||||
|
||||
/// Returns the `Span` of the current token, or `Span::call_site()` if this
|
||||
/// cursor points to eof.
|
||||
pub fn span(mut self) -> Span {
|
||||
match self.entry() {
|
||||
Entry::Group(group, _) => group.span(),
|
||||
Entry::Literal(literal) => literal.span(),
|
||||
Entry::Ident(ident) => ident.span(),
|
||||
Entry::Punct(punct) => punct.span(),
|
||||
Entry::End(_, offset) => {
|
||||
self.ptr = unsafe { self.ptr.offset(*offset) };
|
||||
if let Entry::Group(group, _) = self.entry() {
|
||||
group.span_close()
|
||||
} else {
|
||||
Span::call_site()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the `Span` of the token immediately prior to the position of
|
||||
/// this cursor, or of the current token if there is no previous one.
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub(crate) fn prev_span(mut self) -> Span {
|
||||
if start_of_buffer(self) < self.ptr {
|
||||
self.ptr = unsafe { self.ptr.offset(-1) };
|
||||
}
|
||||
self.span()
|
||||
}
|
||||
|
||||
/// Skip over the next token that is not a None-delimited group, without
|
||||
/// cloning it. Returns `None` if this cursor points to eof.
|
||||
///
|
||||
/// This method treats `'lifetimes` as a single token.
|
||||
pub(crate) fn skip(mut self) -> Option<Cursor<'a>> {
|
||||
self.ignore_none();
|
||||
|
||||
let len = match self.entry() {
|
||||
Entry::End(..) => return None,
|
||||
|
||||
// Treat lifetimes as a single tt for the purposes of 'skip'.
|
||||
Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
|
||||
match unsafe { &*self.ptr.add(1) } {
|
||||
Entry::Ident(_) => 2,
|
||||
_ => 1,
|
||||
}
|
||||
}
|
||||
|
||||
Entry::Group(_, end_offset) => *end_offset,
|
||||
_ => 1,
|
||||
};
|
||||
|
||||
Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) })
|
||||
}
|
||||
|
||||
pub(crate) fn scope_delimiter(self) -> Delimiter {
|
||||
match unsafe { &*self.scope } {
|
||||
Entry::End(_, offset) => match unsafe { &*self.scope.offset(*offset) } {
|
||||
Entry::Group(group, _) => group.delimiter(),
|
||||
_ => Delimiter::None,
|
||||
},
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Copy for Cursor<'a> {}
|
||||
|
||||
impl<'a> Clone for Cursor<'a> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Eq for Cursor<'a> {}
|
||||
|
||||
impl<'a> PartialEq for Cursor<'a> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
ptr::eq(self.ptr, other.ptr)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialOrd for Cursor<'a> {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
if same_buffer(*self, *other) {
|
||||
Some(cmp_assuming_same_buffer(*self, *other))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
|
||||
ptr::eq(a.scope, b.scope)
|
||||
}
|
||||
|
||||
pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool {
|
||||
ptr::eq(start_of_buffer(a), start_of_buffer(b))
|
||||
}
|
||||
|
||||
fn start_of_buffer(cursor: Cursor) -> *const Entry {
|
||||
unsafe {
|
||||
match &*cursor.scope {
|
||||
Entry::End(offset, _) => cursor.scope.offset(*offset),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering {
|
||||
a.ptr.cmp(&b.ptr)
|
||||
}
|
||||
|
||||
pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
|
||||
match cursor.entry() {
|
||||
Entry::Group(group, _) => group.span_open(),
|
||||
_ => cursor.span(),
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,313 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
use crate::expr::Expr;
|
||||
#[cfg(any(feature = "printing", feature = "full"))]
|
||||
use crate::generics::TypeParamBound;
|
||||
#[cfg(any(feature = "printing", feature = "full"))]
|
||||
use crate::path::{Path, PathArguments};
|
||||
#[cfg(any(feature = "printing", feature = "full"))]
|
||||
use crate::punctuated::Punctuated;
|
||||
#[cfg(any(feature = "printing", feature = "full"))]
|
||||
use crate::ty::{ReturnType, Type};
|
||||
#[cfg(feature = "full")]
|
||||
use proc_macro2::{Delimiter, TokenStream, TokenTree};
|
||||
#[cfg(any(feature = "printing", feature = "full"))]
|
||||
use std::ops::ControlFlow;
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
pub(crate) fn requires_semi_to_be_stmt(expr: &Expr) -> bool {
|
||||
match expr {
|
||||
Expr::Macro(expr) => !expr.mac.delimiter.is_brace(),
|
||||
_ => requires_comma_to_be_match_arm(expr),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
pub(crate) fn requires_comma_to_be_match_arm(expr: &Expr) -> bool {
|
||||
match expr {
|
||||
Expr::If(_)
|
||||
| Expr::Match(_)
|
||||
| Expr::Block(_) | Expr::Unsafe(_) // both under ExprKind::Block in rustc
|
||||
| Expr::While(_)
|
||||
| Expr::Loop(_)
|
||||
| Expr::ForLoop(_)
|
||||
| Expr::TryBlock(_)
|
||||
| Expr::Const(_) => false,
|
||||
|
||||
Expr::Array(_)
|
||||
| Expr::Assign(_)
|
||||
| Expr::Async(_)
|
||||
| Expr::Await(_)
|
||||
| Expr::Binary(_)
|
||||
| Expr::Break(_)
|
||||
| Expr::Call(_)
|
||||
| Expr::Cast(_)
|
||||
| Expr::Closure(_)
|
||||
| Expr::Continue(_)
|
||||
| Expr::Field(_)
|
||||
| Expr::Group(_)
|
||||
| Expr::Index(_)
|
||||
| Expr::Infer(_)
|
||||
| Expr::Let(_)
|
||||
| Expr::Lit(_)
|
||||
| Expr::Macro(_)
|
||||
| Expr::MethodCall(_)
|
||||
| Expr::Paren(_)
|
||||
| Expr::Path(_)
|
||||
| Expr::Range(_)
|
||||
| Expr::RawAddr(_)
|
||||
| Expr::Reference(_)
|
||||
| Expr::Repeat(_)
|
||||
| Expr::Return(_)
|
||||
| Expr::Struct(_)
|
||||
| Expr::Try(_)
|
||||
| Expr::Tuple(_)
|
||||
| Expr::Unary(_)
|
||||
| Expr::Yield(_)
|
||||
| Expr::Verbatim(_) => true,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
pub(crate) fn trailing_unparameterized_path(mut ty: &Type) -> bool {
|
||||
loop {
|
||||
match ty {
|
||||
Type::BareFn(t) => match &t.output {
|
||||
ReturnType::Default => return false,
|
||||
ReturnType::Type(_, ret) => ty = ret,
|
||||
},
|
||||
Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) {
|
||||
ControlFlow::Break(trailing_path) => return trailing_path,
|
||||
ControlFlow::Continue(t) => ty = t,
|
||||
},
|
||||
Type::Path(t) => match last_type_in_path(&t.path) {
|
||||
ControlFlow::Break(trailing_path) => return trailing_path,
|
||||
ControlFlow::Continue(t) => ty = t,
|
||||
},
|
||||
Type::Ptr(t) => ty = &t.elem,
|
||||
Type::Reference(t) => ty = &t.elem,
|
||||
Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) {
|
||||
ControlFlow::Break(trailing_path) => return trailing_path,
|
||||
ControlFlow::Continue(t) => ty = t,
|
||||
},
|
||||
|
||||
Type::Array(_)
|
||||
| Type::Group(_)
|
||||
| Type::Infer(_)
|
||||
| Type::Macro(_)
|
||||
| Type::Never(_)
|
||||
| Type::Paren(_)
|
||||
| Type::Slice(_)
|
||||
| Type::Tuple(_)
|
||||
| Type::Verbatim(_) => return false,
|
||||
}
|
||||
}
|
||||
|
||||
fn last_type_in_path(path: &Path) -> ControlFlow<bool, &Type> {
|
||||
match &path.segments.last().unwrap().arguments {
|
||||
PathArguments::None => ControlFlow::Break(true),
|
||||
PathArguments::AngleBracketed(_) => ControlFlow::Break(false),
|
||||
PathArguments::Parenthesized(arg) => match &arg.output {
|
||||
ReturnType::Default => ControlFlow::Break(false),
|
||||
ReturnType::Type(_, ret) => ControlFlow::Continue(ret),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn last_type_in_bounds(
|
||||
bounds: &Punctuated<TypeParamBound, Token![+]>,
|
||||
) -> ControlFlow<bool, &Type> {
|
||||
match bounds.last().unwrap() {
|
||||
TypeParamBound::Trait(t) => last_type_in_path(&t.path),
|
||||
TypeParamBound::Lifetime(_)
|
||||
| TypeParamBound::PreciseCapture(_)
|
||||
| TypeParamBound::Verbatim(_) => ControlFlow::Break(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether the expression's first token is the label of a loop/block.
|
||||
#[cfg(all(feature = "printing", feature = "full"))]
|
||||
pub(crate) fn expr_leading_label(mut expr: &Expr) -> bool {
|
||||
loop {
|
||||
match expr {
|
||||
Expr::Block(e) => return e.label.is_some(),
|
||||
Expr::ForLoop(e) => return e.label.is_some(),
|
||||
Expr::Loop(e) => return e.label.is_some(),
|
||||
Expr::While(e) => return e.label.is_some(),
|
||||
|
||||
Expr::Assign(e) => expr = &e.left,
|
||||
Expr::Await(e) => expr = &e.base,
|
||||
Expr::Binary(e) => expr = &e.left,
|
||||
Expr::Call(e) => expr = &e.func,
|
||||
Expr::Cast(e) => expr = &e.expr,
|
||||
Expr::Field(e) => expr = &e.base,
|
||||
Expr::Index(e) => expr = &e.expr,
|
||||
Expr::MethodCall(e) => expr = &e.receiver,
|
||||
Expr::Range(e) => match &e.start {
|
||||
Some(start) => expr = start,
|
||||
None => return false,
|
||||
},
|
||||
Expr::Try(e) => expr = &e.expr,
|
||||
|
||||
Expr::Array(_)
|
||||
| Expr::Async(_)
|
||||
| Expr::Break(_)
|
||||
| Expr::Closure(_)
|
||||
| Expr::Const(_)
|
||||
| Expr::Continue(_)
|
||||
| Expr::Group(_)
|
||||
| Expr::If(_)
|
||||
| Expr::Infer(_)
|
||||
| Expr::Let(_)
|
||||
| Expr::Lit(_)
|
||||
| Expr::Macro(_)
|
||||
| Expr::Match(_)
|
||||
| Expr::Paren(_)
|
||||
| Expr::Path(_)
|
||||
| Expr::RawAddr(_)
|
||||
| Expr::Reference(_)
|
||||
| Expr::Repeat(_)
|
||||
| Expr::Return(_)
|
||||
| Expr::Struct(_)
|
||||
| Expr::TryBlock(_)
|
||||
| Expr::Tuple(_)
|
||||
| Expr::Unary(_)
|
||||
| Expr::Unsafe(_)
|
||||
| Expr::Verbatim(_)
|
||||
| Expr::Yield(_) => return false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether the expression's last token is `}`.
|
||||
#[cfg(feature = "full")]
|
||||
pub(crate) fn expr_trailing_brace(mut expr: &Expr) -> bool {
|
||||
loop {
|
||||
match expr {
|
||||
Expr::Async(_)
|
||||
| Expr::Block(_)
|
||||
| Expr::Const(_)
|
||||
| Expr::ForLoop(_)
|
||||
| Expr::If(_)
|
||||
| Expr::Loop(_)
|
||||
| Expr::Match(_)
|
||||
| Expr::Struct(_)
|
||||
| Expr::TryBlock(_)
|
||||
| Expr::Unsafe(_)
|
||||
| Expr::While(_) => return true,
|
||||
|
||||
Expr::Assign(e) => expr = &e.right,
|
||||
Expr::Binary(e) => expr = &e.right,
|
||||
Expr::Break(e) => match &e.expr {
|
||||
Some(e) => expr = e,
|
||||
None => return false,
|
||||
},
|
||||
Expr::Cast(e) => return type_trailing_brace(&e.ty),
|
||||
Expr::Closure(e) => expr = &e.body,
|
||||
Expr::Let(e) => expr = &e.expr,
|
||||
Expr::Macro(e) => return e.mac.delimiter.is_brace(),
|
||||
Expr::Range(e) => match &e.end {
|
||||
Some(end) => expr = end,
|
||||
None => return false,
|
||||
},
|
||||
Expr::RawAddr(e) => expr = &e.expr,
|
||||
Expr::Reference(e) => expr = &e.expr,
|
||||
Expr::Return(e) => match &e.expr {
|
||||
Some(e) => expr = e,
|
||||
None => return false,
|
||||
},
|
||||
Expr::Unary(e) => expr = &e.expr,
|
||||
Expr::Verbatim(e) => return tokens_trailing_brace(e),
|
||||
Expr::Yield(e) => match &e.expr {
|
||||
Some(e) => expr = e,
|
||||
None => return false,
|
||||
},
|
||||
|
||||
Expr::Array(_)
|
||||
| Expr::Await(_)
|
||||
| Expr::Call(_)
|
||||
| Expr::Continue(_)
|
||||
| Expr::Field(_)
|
||||
| Expr::Group(_)
|
||||
| Expr::Index(_)
|
||||
| Expr::Infer(_)
|
||||
| Expr::Lit(_)
|
||||
| Expr::MethodCall(_)
|
||||
| Expr::Paren(_)
|
||||
| Expr::Path(_)
|
||||
| Expr::Repeat(_)
|
||||
| Expr::Try(_)
|
||||
| Expr::Tuple(_) => return false,
|
||||
}
|
||||
}
|
||||
|
||||
fn type_trailing_brace(mut ty: &Type) -> bool {
|
||||
loop {
|
||||
match ty {
|
||||
Type::BareFn(t) => match &t.output {
|
||||
ReturnType::Default => return false,
|
||||
ReturnType::Type(_, ret) => ty = ret,
|
||||
},
|
||||
Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) {
|
||||
ControlFlow::Break(trailing_brace) => return trailing_brace,
|
||||
ControlFlow::Continue(t) => ty = t,
|
||||
},
|
||||
Type::Macro(t) => return t.mac.delimiter.is_brace(),
|
||||
Type::Path(t) => match last_type_in_path(&t.path) {
|
||||
Some(t) => ty = t,
|
||||
None => return false,
|
||||
},
|
||||
Type::Ptr(t) => ty = &t.elem,
|
||||
Type::Reference(t) => ty = &t.elem,
|
||||
Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) {
|
||||
ControlFlow::Break(trailing_brace) => return trailing_brace,
|
||||
ControlFlow::Continue(t) => ty = t,
|
||||
},
|
||||
Type::Verbatim(t) => return tokens_trailing_brace(t),
|
||||
|
||||
Type::Array(_)
|
||||
| Type::Group(_)
|
||||
| Type::Infer(_)
|
||||
| Type::Never(_)
|
||||
| Type::Paren(_)
|
||||
| Type::Slice(_)
|
||||
| Type::Tuple(_) => return false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn last_type_in_path(path: &Path) -> Option<&Type> {
|
||||
match &path.segments.last().unwrap().arguments {
|
||||
PathArguments::None | PathArguments::AngleBracketed(_) => None,
|
||||
PathArguments::Parenthesized(arg) => match &arg.output {
|
||||
ReturnType::Default => None,
|
||||
ReturnType::Type(_, ret) => Some(ret),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn last_type_in_bounds(
|
||||
bounds: &Punctuated<TypeParamBound, Token![+]>,
|
||||
) -> ControlFlow<bool, &Type> {
|
||||
match bounds.last().unwrap() {
|
||||
TypeParamBound::Trait(t) => match last_type_in_path(&t.path) {
|
||||
Some(t) => ControlFlow::Continue(t),
|
||||
None => ControlFlow::Break(false),
|
||||
},
|
||||
TypeParamBound::Lifetime(_) | TypeParamBound::PreciseCapture(_) => {
|
||||
ControlFlow::Break(false)
|
||||
}
|
||||
TypeParamBound::Verbatim(t) => ControlFlow::Break(tokens_trailing_brace(t)),
|
||||
}
|
||||
}
|
||||
|
||||
fn tokens_trailing_brace(tokens: &TokenStream) -> bool {
|
||||
if let Some(TokenTree::Group(last)) = tokens.clone().into_iter().last() {
|
||||
last.delimiter() == Delimiter::Brace
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,262 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
/// Define a type that supports parsing and printing a given identifier as if it
|
||||
/// were a keyword.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// As a convention, it is recommended that this macro be invoked within a
|
||||
/// module called `kw` or `keyword` and that the resulting parser be invoked
|
||||
/// with a `kw::` or `keyword::` prefix.
|
||||
///
|
||||
/// ```
|
||||
/// mod kw {
|
||||
/// syn::custom_keyword!(whatever);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The generated syntax tree node supports the following operations just like
|
||||
/// any built-in keyword token.
|
||||
///
|
||||
/// - [Peeking] — `input.peek(kw::whatever)`
|
||||
///
|
||||
/// - [Parsing] — `input.parse::<kw::whatever>()?`
|
||||
///
|
||||
/// - [Printing] — `quote!( ... #whatever_token ... )`
|
||||
///
|
||||
/// - Construction from a [`Span`] — `let whatever_token = kw::whatever(sp)`
|
||||
///
|
||||
/// - Field access to its span — `let sp = whatever_token.span`
|
||||
///
|
||||
/// [Peeking]: crate::parse::ParseBuffer::peek
|
||||
/// [Parsing]: crate::parse::ParseBuffer::parse
|
||||
/// [Printing]: quote::ToTokens
|
||||
/// [`Span`]: proc_macro2::Span
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// This example parses input that looks like `bool = true` or `str = "value"`.
|
||||
/// The key must be either the identifier `bool` or the identifier `str`. If
|
||||
/// `bool`, the value may be either `true` or `false`. If `str`, the value may
|
||||
/// be any string literal.
|
||||
///
|
||||
/// The symbols `bool` and `str` are not reserved keywords in Rust so these are
|
||||
/// not considered keywords in the `syn::token` module. Like any other
|
||||
/// identifier that is not a keyword, these can be declared as custom keywords
|
||||
/// by crates that need to use them as such.
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{LitBool, LitStr, Result, Token};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// mod kw {
|
||||
/// syn::custom_keyword!(bool);
|
||||
/// syn::custom_keyword!(str);
|
||||
/// }
|
||||
///
|
||||
/// enum Argument {
|
||||
/// Bool {
|
||||
/// bool_token: kw::bool,
|
||||
/// eq_token: Token![=],
|
||||
/// value: LitBool,
|
||||
/// },
|
||||
/// Str {
|
||||
/// str_token: kw::str,
|
||||
/// eq_token: Token![=],
|
||||
/// value: LitStr,
|
||||
/// },
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for Argument {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let lookahead = input.lookahead1();
|
||||
/// if lookahead.peek(kw::bool) {
|
||||
/// Ok(Argument::Bool {
|
||||
/// bool_token: input.parse::<kw::bool>()?,
|
||||
/// eq_token: input.parse()?,
|
||||
/// value: input.parse()?,
|
||||
/// })
|
||||
/// } else if lookahead.peek(kw::str) {
|
||||
/// Ok(Argument::Str {
|
||||
/// str_token: input.parse::<kw::str>()?,
|
||||
/// eq_token: input.parse()?,
|
||||
/// value: input.parse()?,
|
||||
/// })
|
||||
/// } else {
|
||||
/// Err(lookahead.error())
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
macro_rules! custom_keyword {
    ($ident:ident) => {
        // The keyword type: a struct holding only the span of the parsed
        // identifier, mirroring the built-in types in `syn::token`.
        #[allow(non_camel_case_types)]
        pub struct $ident {
            #[allow(dead_code)]
            pub span: $crate::__private::Span,
        }

        // Constructor function with the same name as the type, so the
        // keyword can be built from a span like built-in tokens do, e.g.
        // `kw::whatever(span)`.
        #[doc(hidden)]
        #[allow(dead_code, non_snake_case)]
        pub fn $ident<__S: $crate::__private::IntoSpans<$crate::__private::Span>>(
            span: __S,
        ) -> $ident {
            $ident {
                span: $crate::__private::IntoSpans::into_spans(span),
            }
        }

        // Trait impls are generated inside an unnamed const block.
        const _: () = {
            impl $crate::__private::Default for $ident {
                fn default() -> Self {
                    $ident {
                        span: $crate::__private::Span::call_site(),
                    }
                }
            }

            // Each helper expands to trait impls only when the corresponding
            // cargo feature is enabled; otherwise it expands to nothing.
            $crate::impl_parse_for_custom_keyword!($ident);
            $crate::impl_to_tokens_for_custom_keyword!($ident);
            $crate::impl_clone_for_custom_keyword!($ident);
            $crate::impl_extra_traits_for_custom_keyword!($ident);
        };
    };
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_keyword {
    ($ident:ident) => {
        // For peek.
        impl $crate::__private::CustomToken for $ident {
            // The keyword matches when the next token is an identifier
            // spelled exactly like the keyword; the cursor is not advanced.
            fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
                if let $crate::__private::Some((ident, _rest)) = cursor.ident() {
                    ident == $crate::__private::stringify!($ident)
                } else {
                    false
                }
            }

            // Display string for the token, wrapped in backticks.
            fn display() -> &'static $crate::__private::str {
                $crate::__private::concat!("`", $crate::__private::stringify!($ident), "`")
            }
        }

        impl $crate::parse::Parse for $ident {
            fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
                // Consume the identifier only if it matches the keyword;
                // otherwise report an "expected `...`" error at the cursor.
                input.step(|cursor| {
                    if let $crate::__private::Some((ident, rest)) = cursor.ident() {
                        if ident == $crate::__private::stringify!($ident) {
                            return $crate::__private::Ok(($ident { span: ident.span() }, rest));
                        }
                    }
                    $crate::__private::Err(cursor.error($crate::__private::concat!(
                        "expected `",
                        $crate::__private::stringify!($ident),
                        "`",
                    )))
                })
            }
        }
    };
}

// Not public API.
//
// Expands to nothing when the "parsing" feature is disabled.
#[cfg(not(feature = "parsing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_keyword {
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "printing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::ToTokens for $ident {
            // Print the keyword back out as a plain identifier carrying the
            // stored span.
            fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
                let ident = $crate::Ident::new($crate::__private::stringify!($ident), self.span);
                $crate::__private::TokenStreamExt::append(tokens, ident);
            }
        }
    };
}

// Not public API.
//
// Expands to nothing when the "printing" feature is disabled.
#[cfg(not(feature = "printing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_keyword {
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "clone-impls")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::Copy for $ident {}

        // `Clone` is written out explicitly on a `Copy` type, hence the
        // allowed clippy lint.
        #[allow(clippy::expl_impl_clone_on_copy)]
        impl $crate::__private::Clone for $ident {
            fn clone(&self) -> Self {
                *self
            }
        }
    };
}

// Not public API.
//
// Expands to nothing when the "clone-impls" feature is disabled.
#[cfg(not(feature = "clone-impls"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_keyword {
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "extra-traits")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::Debug for $ident {
            // Debug-formats as `Keyword [name]`.
            fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
                $crate::__private::Formatter::write_str(
                    f,
                    $crate::__private::concat!(
                        "Keyword [",
                        $crate::__private::stringify!($ident),
                        "]",
                    ),
                )
            }
        }

        impl $crate::__private::Eq for $ident {}

        // All values of a keyword type compare equal: the span, the only
        // field, is deliberately ignored.
        impl $crate::__private::PartialEq for $ident {
            fn eq(&self, _other: &Self) -> $crate::__private::bool {
                true
            }
        }

        // Hashing writes nothing, consistent with the `PartialEq` impl.
        impl $crate::__private::Hash for $ident {
            fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
        }
    };
}

// Not public API.
//
// Expands to nothing when the "extra-traits" feature is disabled.
#[cfg(not(feature = "extra-traits"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_keyword {
    ($ident:ident) => {};
}
|
||||
|
|
@ -0,0 +1,306 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
/// Define a type that supports parsing and printing a multi-character symbol
|
||||
/// as if it were a punctuation token.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// ```
|
||||
/// syn::custom_punctuation!(LeftRightArrow, <=>);
|
||||
/// ```
|
||||
///
|
||||
/// The generated syntax tree node supports the following operations just like
|
||||
/// any built-in punctuation token.
|
||||
///
|
||||
/// - [Peeking] — `input.peek(LeftRightArrow)`
|
||||
///
|
||||
/// - [Parsing] — `input.parse::<LeftRightArrow>()?`
|
||||
///
|
||||
/// - [Printing] — `quote!( ... #lrarrow ... )`
|
||||
///
|
||||
/// - Construction from a [`Span`] — `let lrarrow = LeftRightArrow(sp)`
|
||||
///
|
||||
/// - Construction from multiple [`Span`] — `let lrarrow = LeftRightArrow([sp, sp, sp])`
|
||||
///
|
||||
/// - Field access to its spans — `let spans = lrarrow.spans`
|
||||
///
|
||||
/// [Peeking]: crate::parse::ParseBuffer::peek
|
||||
/// [Parsing]: crate::parse::ParseBuffer::parse
|
||||
/// [Printing]: quote::ToTokens
|
||||
/// [`Span`]: proc_macro2::Span
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use proc_macro2::{TokenStream, TokenTree};
|
||||
/// use syn::parse::{Parse, ParseStream, Peek, Result};
|
||||
/// use syn::punctuated::Punctuated;
|
||||
/// use syn::Expr;
|
||||
///
|
||||
/// syn::custom_punctuation!(PathSeparator, </>);
|
||||
///
|
||||
/// // expr </> expr </> expr ...
|
||||
/// struct PathSegments {
|
||||
/// segments: Punctuated<Expr, PathSeparator>,
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for PathSegments {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let mut segments = Punctuated::new();
|
||||
///
|
||||
/// let first = parse_until(input, PathSeparator)?;
|
||||
/// segments.push_value(syn::parse2(first)?);
|
||||
///
|
||||
/// while input.peek(PathSeparator) {
|
||||
/// segments.push_punct(input.parse()?);
|
||||
///
|
||||
/// let next = parse_until(input, PathSeparator)?;
|
||||
/// segments.push_value(syn::parse2(next)?);
|
||||
/// }
|
||||
///
|
||||
/// Ok(PathSegments { segments })
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// fn parse_until<E: Peek>(input: ParseStream, end: E) -> Result<TokenStream> {
|
||||
/// let mut tokens = TokenStream::new();
|
||||
/// while !input.is_empty() && !input.peek(end) {
|
||||
/// let next: TokenTree = input.parse()?;
|
||||
/// tokens.extend(Some(next));
|
||||
/// }
|
||||
/// Ok(tokens)
|
||||
/// }
|
||||
///
|
||||
/// fn main() {
|
||||
/// let input = r#" a::b </> c::d::e "#;
|
||||
/// let _: PathSegments = syn::parse_str(input).unwrap();
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
macro_rules! custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        pub struct $ident {
            // One `Span` per character of the punctuation, e.g. `[Span; 3]`
            // for `<=>` (see `custom_punctuation_repr!`).
            #[allow(dead_code)]
            pub spans: $crate::custom_punctuation_repr!($($tt)+),
        }

        // Constructor function with the same name as the type, e.g.
        // `LeftRightArrow(span)` or `LeftRightArrow([sp, sp, sp])`.
        #[doc(hidden)]
        #[allow(dead_code, non_snake_case)]
        pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
            spans: __S,
        ) -> $ident {
            // `strict` mode makes `custom_punctuation_len!` emit a compile
            // error (via `custom_punctuation_unexpected!`) for any token
            // that is not a recognized punctuation character.
            let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
            $ident {
                spans: $crate::__private::IntoSpans::into_spans(spans)
            }
        }

        // Trait impls are generated inside an unnamed const block.
        const _: () = {
            impl $crate::__private::Default for $ident {
                fn default() -> Self {
                    $ident($crate::__private::Span::call_site())
                }
            }

            // Each helper expands to trait impls only when the corresponding
            // cargo feature is enabled; otherwise it expands to nothing.
            $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
            $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
            $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
            $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
        };
    };
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        // For peek.
        impl $crate::__private::CustomToken for $ident {
            // Match the upcoming tokens against the stringified punctuation
            // characters without advancing the cursor.
            fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
                $crate::__private::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
            }

            // Display string for the token, wrapped in backticks.
            fn display() -> &'static $crate::__private::str {
                $crate::__private::concat!("`", $crate::stringify_punct!($($tt)+), "`")
            }
        }

        impl $crate::parse::Parse for $ident {
            fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
                // `parse_punct` yields one span per punctuation character.
                let spans: $crate::custom_punctuation_repr!($($tt)+) =
                    $crate::__private::parse_punct(input, $crate::stringify_punct!($($tt)+))?;
                Ok($ident(spans))
            }
        }
    };
}

// Not public API.
//
// Expands to nothing when the "parsing" feature is disabled.
#[cfg(not(feature = "parsing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "printing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::ToTokens for $ident {
            // Print the punctuation characters back out using the stored
            // per-character spans.
            fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
                $crate::__private::print_punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
            }
        }
    };
}

// Not public API.
//
// Expands to nothing when the "printing" feature is disabled.
#[cfg(not(feature = "printing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "clone-impls")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::Copy for $ident {}

        // `Clone` is written out explicitly on a `Copy` type, hence the
        // allowed clippy lint.
        #[allow(clippy::expl_impl_clone_on_copy)]
        impl $crate::__private::Clone for $ident {
            fn clone(&self) -> Self {
                *self
            }
        }
    };
}

// Not public API.
//
// Expands to nothing when the "clone-impls" feature is disabled.
#[cfg(not(feature = "clone-impls"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "extra-traits")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::Debug for $ident {
            // Debug-formats as the type name only; spans are not shown.
            fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
                $crate::__private::Formatter::write_str(f, $crate::__private::stringify!($ident))
            }
        }

        impl $crate::__private::Eq for $ident {}

        // All values of a punctuation type compare equal: the spans, the
        // only field, are deliberately ignored.
        impl $crate::__private::PartialEq for $ident {
            fn eq(&self, _other: &Self) -> $crate::__private::bool {
                true
            }
        }

        // Hashing writes nothing, consistent with the `PartialEq` impl.
        impl $crate::__private::Hash for $ident {
            fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
        }
    };
}

// Not public API.
//
// Expands to nothing when the "extra-traits" feature is disabled.
#[cfg(not(feature = "extra-traits"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
//
// Expands to the storage type for the punctuation's spans: an array of
// `Span` with one element per character, e.g. `[Span; 3]` for `<=>`.
#[doc(hidden)]
#[macro_export]
macro_rules! custom_punctuation_repr {
    ($($tt:tt)+) => {
        [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
    };
}
|
||||
|
||||
// Not public API.
//
// Expands to the number of characters in a single punctuation token. In
// `lenient` mode an unrecognized token counts as 0; in `strict` mode it
// triggers a compile error through `custom_punctuation_unexpected!`.
#[doc(hidden)]
#[macro_export]
#[rustfmt::skip]
macro_rules! custom_punctuation_len {
    ($mode:ident, &) => { 1 };
    ($mode:ident, &&) => { 2 };
    ($mode:ident, &=) => { 2 };
    ($mode:ident, @) => { 1 };
    ($mode:ident, ^) => { 1 };
    ($mode:ident, ^=) => { 2 };
    ($mode:ident, :) => { 1 };
    ($mode:ident, ,) => { 1 };
    ($mode:ident, $) => { 1 };
    ($mode:ident, .) => { 1 };
    ($mode:ident, ..) => { 2 };
    ($mode:ident, ...) => { 3 };
    ($mode:ident, ..=) => { 3 };
    ($mode:ident, =) => { 1 };
    ($mode:ident, ==) => { 2 };
    ($mode:ident, =>) => { 2 };
    ($mode:ident, >=) => { 2 };
    ($mode:ident, >) => { 1 };
    ($mode:ident, <-) => { 2 };
    ($mode:ident, <=) => { 2 };
    ($mode:ident, <) => { 1 };
    ($mode:ident, -) => { 1 };
    ($mode:ident, -=) => { 2 };
    ($mode:ident, !=) => { 2 };
    ($mode:ident, !) => { 1 };
    ($mode:ident, |) => { 1 };
    ($mode:ident, |=) => { 2 };
    ($mode:ident, ||) => { 2 };
    ($mode:ident, ::) => { 2 };
    ($mode:ident, %) => { 1 };
    ($mode:ident, %=) => { 2 };
    ($mode:ident, +) => { 1 };
    ($mode:ident, +=) => { 2 };
    ($mode:ident, #) => { 1 };
    ($mode:ident, ?) => { 1 };
    ($mode:ident, ->) => { 2 };
    ($mode:ident, ;) => { 1 };
    ($mode:ident, <<) => { 2 };
    ($mode:ident, <<=) => { 3 };
    ($mode:ident, >>) => { 2 };
    ($mode:ident, >>=) => { 3 };
    ($mode:ident, /) => { 1 };
    ($mode:ident, /=) => { 2 };
    ($mode:ident, *) => { 1 };
    ($mode:ident, *=) => { 2 };
    ($mode:ident, ~) => { 1 };
    (lenient, $tt:tt) => { 0 };
    (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
}
|
||||
|
||||
// Not public API.
//
// Matches only an empty invocation, so `custom_punctuation_unexpected!($tt)`
// with any token fails to compile. Used by `custom_punctuation_len!`'s
// `strict` mode to reject unsupported punctuation characters.
#[doc(hidden)]
#[macro_export]
macro_rules! custom_punctuation_unexpected {
    () => {};
}
|
||||
|
||||
// Not public API.
//
// Concatenates the stringified punctuation tokens with no separator, e.g.
// `<=>` becomes the string `"<=>"`.
#[doc(hidden)]
#[macro_export]
macro_rules! stringify_punct {
    ($($tt:tt)+) => {
        $crate::__private::concat!($($crate::__private::stringify!($tt)),+)
    };
}
|
||||
|
|
@ -0,0 +1,426 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::attr::Attribute;
|
||||
use crate::expr::{Expr, Index, Member};
|
||||
use crate::ident::Ident;
|
||||
use crate::punctuated::{self, Punctuated};
|
||||
use crate::restriction::{FieldMutability, Visibility};
|
||||
use crate::token;
|
||||
use crate::ty::Type;
|
||||
|
||||
ast_struct! {
    /// An enum variant.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Variant {
        /// Attributes tagged on the variant.
        pub attrs: Vec<Attribute>,

        /// Name of the variant.
        pub ident: Ident,

        /// Content stored in the variant.
        pub fields: Fields,

        /// Explicit discriminant: `Variant = 1`
        pub discriminant: Option<(Token![=], Expr)>,
    }
}
|
||||
|
||||
ast_enum_of_structs! {
    /// Data stored within an enum variant or struct.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum Fields {
        /// Named fields of a struct or struct variant such as `Point { x: f64,
        /// y: f64 }`.
        Named(FieldsNamed),

        /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
        Unnamed(FieldsUnnamed),

        /// Unit struct or unit variant such as `None`.
        Unit,
    }
}
|
||||
|
||||
ast_struct! {
    /// Named fields of a struct or struct variant such as `Point { x: f64,
    /// y: f64 }`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct FieldsNamed {
        /// The surrounding braces.
        pub brace_token: token::Brace,
        /// The comma-separated fields inside the braces.
        pub named: Punctuated<Field, Token![,]>,
    }
}
|
||||
|
||||
ast_struct! {
    /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct FieldsUnnamed {
        /// The surrounding parentheses.
        pub paren_token: token::Paren,
        /// The comma-separated fields inside the parentheses.
        pub unnamed: Punctuated<Field, Token![,]>,
    }
}
|
||||
|
||||
impl Fields {
    /// Get an iterator over the borrowed [`Field`] items in this object. This
    /// iterator can be used to iterate over a named or unnamed struct or
    /// variant's fields uniformly.
    pub fn iter(&self) -> punctuated::Iter<Field> {
        match self {
            // A unit struct/variant has no fields; yield an empty iterator.
            Fields::Unit => crate::punctuated::empty_punctuated_iter(),
            Fields::Named(f) => f.named.iter(),
            Fields::Unnamed(f) => f.unnamed.iter(),
        }
    }

    /// Get an iterator over the mutably borrowed [`Field`] items in this
    /// object. This iterator can be used to iterate over a named or unnamed
    /// struct or variant's fields uniformly.
    pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> {
        match self {
            Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(),
            Fields::Named(f) => f.named.iter_mut(),
            Fields::Unnamed(f) => f.unnamed.iter_mut(),
        }
    }

    /// Returns the number of fields.
    pub fn len(&self) -> usize {
        match self {
            Fields::Unit => 0,
            Fields::Named(f) => f.named.len(),
            Fields::Unnamed(f) => f.unnamed.len(),
        }
    }

    /// Returns `true` if there are zero fields.
    pub fn is_empty(&self) -> bool {
        match self {
            Fields::Unit => true,
            Fields::Named(f) => f.named.is_empty(),
            Fields::Unnamed(f) => f.unnamed.is_empty(),
        }
    }

    // NOTE: the trailing `[Members]` names the concrete type backing the
    // `impl Trait` return — see the `return_impl_trait!` macro definition.
    return_impl_trait! {
        /// Get an iterator over the fields of a struct or variant as [`Member`]s.
        /// This iterator can be used to iterate over a named or unnamed struct or
        /// variant's fields uniformly.
        ///
        /// # Example
        ///
        /// The following is a simplistic [`Clone`] derive for structs. (A more
        /// complete implementation would additionally want to infer trait bounds on
        /// the generic type parameters.)
        ///
        /// ```
        /// # use quote::quote;
        /// #
        /// fn derive_clone(input: &syn::ItemStruct) -> proc_macro2::TokenStream {
        ///     let ident = &input.ident;
        ///     let members = input.fields.members();
        ///     let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
        ///     quote! {
        ///         impl #impl_generics Clone for #ident #ty_generics #where_clause {
        ///             fn clone(&self) -> Self {
        ///                 Self {
        ///                     #(#members: self.#members.clone()),*
        ///                 }
        ///             }
        ///         }
        ///     }
        /// }
        /// ```
        ///
        /// For structs with named fields, it produces an expression like `Self { a:
        /// self.a.clone() }`. For structs with unnamed fields, `Self { 0:
        /// self.0.clone() }`. And for unit structs, `Self {}`.
        pub fn members(&self) -> impl Iterator<Item = Member> + Clone + '_ [Members] {
            Members {
                fields: self.iter(),
                index: 0,
            }
        }
    }
}
|
||||
|
||||
impl IntoIterator for Fields {
    type Item = Field;
    type IntoIter = punctuated::IntoIter<Field>;

    fn into_iter(self) -> Self::IntoIter {
        match self {
            // A freshly created empty `Punctuated` (the punctuation type is
            // irrelevant here) supplies the empty iterator for unit
            // structs/variants.
            Fields::Unit => Punctuated::<Field, ()>::new().into_iter(),
            Fields::Named(f) => f.named.into_iter(),
            Fields::Unnamed(f) => f.unnamed.into_iter(),
        }
    }
}
|
||||
|
||||
// Borrowing iteration, delegating to `Fields::iter`.
impl<'a> IntoIterator for &'a Fields {
    type Item = &'a Field;
    type IntoIter = punctuated::Iter<'a, Field>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
|
||||
|
||||
// Mutably borrowing iteration, delegating to `Fields::iter_mut`.
impl<'a> IntoIterator for &'a mut Fields {
    type Item = &'a mut Field;
    type IntoIter = punctuated::IterMut<'a, Field>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
|
||||
|
||||
ast_struct! {
    /// A field of a struct or enum variant.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Field {
        /// Attributes tagged on the field.
        pub attrs: Vec<Attribute>,

        /// Visibility of the field.
        pub vis: Visibility,

        pub mutability: FieldMutability,

        /// Name of the field, if any.
        ///
        /// Fields of tuple structs have no names.
        pub ident: Option<Ident>,

        /// The `:` token, absent for tuple-struct fields.
        pub colon_token: Option<Token![:]>,

        /// Type of the field.
        pub ty: Type,
    }
}
|
||||
|
||||
/// Iterator over a struct's or variant's fields as [`Member`]s, returned by
/// [`Fields::members`].
pub struct Members<'a> {
    // Underlying iterator over the fields.
    fields: punctuated::Iter<'a, Field>,
    // Running positional index, used for unnamed (tuple) fields.
    index: u32,
}
|
||||
|
||||
impl<'a> Iterator for Members<'a> {
    type Item = Member;

    fn next(&mut self) -> Option<Self::Item> {
        let field = self.fields.next()?;
        let member = match &field.ident {
            // Named field: the member is the field's name.
            Some(ident) => Member::Named(ident.clone()),
            // Unnamed field: the member is the positional index, spanned to
            // the field's type where span information is available.
            None => {
                #[cfg(all(feature = "parsing", feature = "printing"))]
                let span = crate::spanned::Spanned::span(&field.ty);
                #[cfg(not(all(feature = "parsing", feature = "printing")))]
                let span = proc_macro2::Span::call_site();
                Member::Unnamed(Index {
                    index: self.index,
                    span,
                })
            }
        };
        // Advance the positional index past this field.
        self.index += 1;
        Some(member)
    }
}
|
||||
|
||||
impl<'a> Clone for Members<'a> {
|
||||
fn clone(&self) -> Self {
|
||||
Members {
|
||||
fields: self.fields.clone(),
|
||||
index: self.index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use crate::attr::Attribute;
    use crate::data::{Field, Fields, FieldsNamed, FieldsUnnamed, Variant};
    use crate::error::Result;
    use crate::expr::Expr;
    use crate::ext::IdentExt as _;
    use crate::ident::Ident;
    #[cfg(not(feature = "full"))]
    use crate::parse::discouraged::Speculative as _;
    use crate::parse::{Parse, ParseStream};
    use crate::restriction::{FieldMutability, Visibility};
    #[cfg(not(feature = "full"))]
    use crate::scan_expr::scan_expr;
    use crate::token;
    use crate::ty::Type;
    use crate::verbatim;

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for Variant {
        fn parse(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_outer)?;
            // A visibility qualifier is accepted but discarded; the parsed
            // `Variant` carries no visibility of its own.
            let _visibility: Visibility = input.parse()?;
            let ident: Ident = input.parse()?;
            // Braces => named fields, parentheses => tuple fields,
            // neither => unit variant.
            let fields = if input.peek(token::Brace) {
                Fields::Named(input.parse()?)
            } else if input.peek(token::Paren) {
                Fields::Unnamed(input.parse()?)
            } else {
                Fields::Unit
            };
            let discriminant = if input.peek(Token![=]) {
                let eq_token: Token![=] = input.parse()?;
                #[cfg(feature = "full")]
                let discriminant: Expr = input.parse()?;
                // Without the "full" feature only a limited expression
                // grammar is available: first try a regular parse on a fork,
                // and otherwise scan the tokens and keep them as a verbatim
                // expression.
                #[cfg(not(feature = "full"))]
                let discriminant = {
                    let begin = input.fork();
                    let ahead = input.fork();
                    let mut discriminant: Result<Expr> = ahead.parse();
                    if discriminant.is_ok() {
                        input.advance_to(&ahead);
                    } else if scan_expr(input).is_ok() {
                        discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input)));
                    }
                    discriminant?
                };
                Some((eq_token, discriminant))
            } else {
                None
            };
            Ok(Variant {
                attrs,
                ident,
                fields,
                discriminant,
            })
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for FieldsNamed {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(FieldsNamed {
                brace_token: braced!(content in input),
                named: content.parse_terminated(Field::parse_named, Token![,])?,
            })
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for FieldsUnnamed {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(FieldsUnnamed {
                paren_token: parenthesized!(content in input),
                unnamed: content.parse_terminated(Field::parse_unnamed, Token![,])?,
            })
        }
    }

    impl Field {
        /// Parses a named (braced struct) field.
        #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
        pub fn parse_named(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_outer)?;
            let vis: Visibility = input.parse()?;

            // A field named `_` (only accepted under the "full" feature) is
            // not an ordinary identifier, so it is parsed with `parse_any`.
            let unnamed_field = cfg!(feature = "full") && input.peek(Token![_]);
            let ident = if unnamed_field {
                input.call(Ident::parse_any)
            } else {
                input.parse()
            }?;

            let colon_token: Token![:] = input.parse()?;

            // `_: struct { ... }` / `_: union { ... }` carry an anonymous
            // struct/union type, which is preserved as a verbatim token
            // stream rather than parsed into a `Type` variant.
            let ty: Type = if unnamed_field
                && (input.peek(Token![struct])
                    || input.peek(Token![union]) && input.peek2(token::Brace))
            {
                let begin = input.fork();
                input.call(Ident::parse_any)?;
                input.parse::<FieldsNamed>()?;
                Type::Verbatim(verbatim::between(&begin, input))
            } else {
                input.parse()?
            };

            Ok(Field {
                attrs,
                vis,
                mutability: FieldMutability::None,
                ident: Some(ident),
                colon_token: Some(colon_token),
                ty,
            })
        }

        /// Parses an unnamed (tuple struct) field.
        #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
        pub fn parse_unnamed(input: ParseStream) -> Result<Self> {
            Ok(Field {
                attrs: input.call(Attribute::parse_outer)?,
                vis: input.parse()?,
                mutability: FieldMutability::None,
                ident: None,
                colon_token: None,
                ty: input.parse()?,
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use crate::data::{Field, FieldsNamed, FieldsUnnamed, Variant};
    use crate::print::TokensOrDefault;
    use proc_macro2::TokenStream;
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Variant {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(&self.attrs);
            self.ident.to_tokens(tokens);
            self.fields.to_tokens(tokens);
            // `= discriminant`, if one was present.
            if let Some((eq_token, disc)) = &self.discriminant {
                eq_token.to_tokens(tokens);
                disc.to_tokens(tokens);
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for FieldsNamed {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.brace_token.surround(tokens, |tokens| {
                self.named.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for FieldsUnnamed {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.paren_token.surround(tokens, |tokens| {
                self.unnamed.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Field {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(&self.attrs);
            self.vis.to_tokens(tokens);
            // Tuple-struct fields have no name: print `name:` only when an
            // identifier is present, falling back to a default `:` token if
            // `colon_token` is missing.
            if let Some(ident) = &self.ident {
                ident.to_tokens(tokens);
                TokensOrDefault(&self.colon_token).to_tokens(tokens);
            }
            self.ty.to_tokens(tokens);
        }
    }
}
|
||||
|
|
@ -0,0 +1,261 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::attr::Attribute;
|
||||
use crate::data::{Fields, FieldsNamed, Variant};
|
||||
use crate::generics::Generics;
|
||||
use crate::ident::Ident;
|
||||
use crate::punctuated::Punctuated;
|
||||
use crate::restriction::Visibility;
|
||||
use crate::token;
|
||||
|
||||
ast_struct! {
    /// Data structure sent to a `proc_macro_derive` macro.
    #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
    pub struct DeriveInput {
        /// Attributes tagged on the whole struct, enum or union.
        pub attrs: Vec<Attribute>,
        /// Visibility of the struct, enum or union.
        pub vis: Visibility,
        /// Name of the struct, enum or union.
        pub ident: Ident,
        /// Generics required to complete the definition.
        pub generics: Generics,
        /// Data within the struct, enum or union.
        pub data: Data,
    }
}
|
||||
|
||||
ast_enum! {
    /// The storage of a struct, enum or union data structure.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
    #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
    pub enum Data {
        /// A struct input.
        Struct(DataStruct),
        /// An enum input.
        Enum(DataEnum),
        /// An untagged union input.
        Union(DataUnion),
    }
}
|
||||
|
||||
ast_struct! {
    /// A struct input to a `proc_macro_derive` macro.
    #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
    pub struct DataStruct {
        /// The `struct` keyword.
        pub struct_token: Token![struct],
        /// The struct's fields: named, unnamed (tuple) or unit.
        pub fields: Fields,
        /// Trailing `;` of a tuple or unit struct, if any.
        pub semi_token: Option<Token![;]>,
    }
}
|
||||
|
||||
ast_struct! {
    /// An enum input to a `proc_macro_derive` macro.
    #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
    pub struct DataEnum {
        /// The `enum` keyword.
        pub enum_token: Token![enum],
        /// The braces surrounding the variants.
        pub brace_token: token::Brace,
        /// The comma-separated variants.
        pub variants: Punctuated<Variant, Token![,]>,
    }
}
|
||||
|
||||
ast_struct! {
    /// An untagged union input to a `proc_macro_derive` macro.
    #[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
    pub struct DataUnion {
        /// The `union` keyword.
        pub union_token: Token![union],
        /// The union's named fields.
        pub fields: FieldsNamed,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub(crate) mod parsing {
|
||||
use crate::attr::Attribute;
|
||||
use crate::data::{Fields, FieldsNamed, Variant};
|
||||
use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
|
||||
use crate::error::Result;
|
||||
use crate::generics::{Generics, WhereClause};
|
||||
use crate::ident::Ident;
|
||||
use crate::parse::{Parse, ParseStream};
|
||||
use crate::punctuated::Punctuated;
|
||||
use crate::restriction::Visibility;
|
||||
use crate::token;
|
||||
|
||||
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for DeriveInput {
        fn parse(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_outer)?;
            let vis = input.parse::<Visibility>()?;

            // Dispatch on the item keyword; anything other than a struct,
            // enum or union is rejected as derive input.
            let lookahead = input.lookahead1();
            if lookahead.peek(Token![struct]) {
                let struct_token = input.parse::<Token![struct]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                let (where_clause, fields, semi) = data_struct(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    // The `where` clause may appear after the fields, so it is
                    // parsed by the `data_*` helper and spliced back into the
                    // generics here.
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Struct(DataStruct {
                        struct_token,
                        fields,
                        semi_token: semi,
                    }),
                })
            } else if lookahead.peek(Token![enum]) {
                let enum_token = input.parse::<Token![enum]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                let (where_clause, brace, variants) = data_enum(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Enum(DataEnum {
                        enum_token,
                        brace_token: brace,
                        variants,
                    }),
                })
            } else if lookahead.peek(Token![union]) {
                let union_token = input.parse::<Token![union]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                let (where_clause, fields) = data_union(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Union(DataUnion {
                        union_token,
                        fields,
                    }),
                })
            } else {
                Err(lookahead.error())
            }
        }
    }
|
||||
|
||||
pub(crate) fn data_struct(
|
||||
input: ParseStream,
|
||||
) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> {
|
||||
let mut lookahead = input.lookahead1();
|
||||
let mut where_clause = None;
|
||||
if lookahead.peek(Token![where]) {
|
||||
where_clause = Some(input.parse()?);
|
||||
lookahead = input.lookahead1();
|
||||
}
|
||||
|
||||
if where_clause.is_none() && lookahead.peek(token::Paren) {
|
||||
let fields = input.parse()?;
|
||||
|
||||
lookahead = input.lookahead1();
|
||||
if lookahead.peek(Token![where]) {
|
||||
where_clause = Some(input.parse()?);
|
||||
lookahead = input.lookahead1();
|
||||
}
|
||||
|
||||
if lookahead.peek(Token![;]) {
|
||||
let semi = input.parse()?;
|
||||
Ok((where_clause, Fields::Unnamed(fields), Some(semi)))
|
||||
} else {
|
||||
Err(lookahead.error())
|
||||
}
|
||||
} else if lookahead.peek(token::Brace) {
|
||||
let fields = input.parse()?;
|
||||
Ok((where_clause, Fields::Named(fields), None))
|
||||
} else if lookahead.peek(Token![;]) {
|
||||
let semi = input.parse()?;
|
||||
Ok((where_clause, Fields::Unit, Some(semi)))
|
||||
} else {
|
||||
Err(lookahead.error())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn data_enum(
|
||||
input: ParseStream,
|
||||
) -> Result<(
|
||||
Option<WhereClause>,
|
||||
token::Brace,
|
||||
Punctuated<Variant, Token![,]>,
|
||||
)> {
|
||||
let where_clause = input.parse()?;
|
||||
|
||||
let content;
|
||||
let brace = braced!(content in input);
|
||||
let variants = content.parse_terminated(Variant::parse, Token![,])?;
|
||||
|
||||
Ok((where_clause, brace, variants))
|
||||
}
|
||||
|
||||
pub(crate) fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
|
||||
let where_clause = input.parse()?;
|
||||
let fields = input.parse()?;
|
||||
Ok((where_clause, fields))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use crate::attr::FilterAttrs;
    use crate::data::Fields;
    use crate::derive::{Data, DeriveInput};
    use crate::print::TokensOrDefault;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for DeriveInput {
        // Prints the item in the same order the parser consumed it: outer
        // attributes, visibility, keyword, ident, generics, body.
        fn to_tokens(&self, tokens: &mut TokenStream) {
            for attr in self.attrs.outer() {
                attr.to_tokens(tokens);
            }
            self.vis.to_tokens(tokens);

            // The introducing keyword depends on the kind of item.
            match &self.data {
                Data::Struct(def) => def.struct_token.to_tokens(tokens),
                Data::Enum(def) => def.enum_token.to_tokens(tokens),
                Data::Union(def) => def.union_token.to_tokens(tokens),
            }

            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);

            // Where-clause placement differs per body form: a tuple struct
            // puts it after the parenthesized fields, everything else puts
            // it before the body.
            match &self.data {
                Data::Struct(def) => match &def.fields {
                    Fields::Named(fields) => {
                        self.generics.where_clause.to_tokens(tokens);
                        fields.to_tokens(tokens);
                    }
                    Fields::Unnamed(fields) => {
                        fields.to_tokens(tokens);
                        self.generics.where_clause.to_tokens(tokens);
                        TokensOrDefault(&def.semi_token).to_tokens(tokens);
                    }
                    Fields::Unit => {
                        self.generics.where_clause.to_tokens(tokens);
                        TokensOrDefault(&def.semi_token).to_tokens(tokens);
                    }
                },
                Data::Enum(def) => {
                    self.generics.where_clause.to_tokens(tokens);
                    def.brace_token.surround(tokens, |tokens| {
                        def.variants.to_tokens(tokens);
                    });
                }
                Data::Union(def) => {
                    self.generics.where_clause.to_tokens(tokens);
                    def.fields.to_tokens(tokens);
                }
            }
        }
    }
}
|
||||
|
|
@ -0,0 +1,227 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
//! Extensions to the parsing API with niche applicability.
|
||||
|
||||
use crate::buffer::Cursor;
|
||||
use crate::error::Result;
|
||||
use crate::parse::{inner_unexpected, ParseBuffer, Unexpected};
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::Delimiter;
|
||||
use std::cell::Cell;
|
||||
use std::mem;
|
||||
use std::rc::Rc;
|
||||
|
||||
/// Extensions to the `ParseStream` API to support speculative parsing.
pub trait Speculative {
    /// Advance this parse stream to the position of a forked parse stream.
    ///
    /// This is the opposite operation to [`ParseStream::fork`]: fork a parse
    /// stream, perform some speculative parsing on the fork, then call
    /// `advance_to` on the original stream to "commit" what the fork
    /// consumed.
    ///
    /// If you can avoid doing this, you should, as it limits the ability to
    /// generate useful errors: even when the speculative parse is the
    /// "correct" one, the error shown to the user is that of the fallback
    /// parse. That said, it is often the only way to parse syntax of the
    /// form `A* B*` for arbitrary syntax `A` and `B`, since when the fork
    /// fails to parse an `A` it is impossible to tell whether that was a
    /// syntax error in an intended `A`, or simply the beginning of the
    /// `B`s. Use with care.
    ///
    /// Note that when `A` is a subset of `B`, `A* B*` can instead be parsed
    /// as `B*` followed by reclassifying the leading members as `A`s,
    /// bypassing speculative parsing and its drawbacks entirely.
    ///
    /// [`ParseStream::fork`]: ParseBuffer::fork
    ///
    /// # Performance
    ///
    /// This method performs a cheap fixed amount of work that does not
    /// depend on how far apart the two streams are positioned.
    ///
    /// # Panics
    ///
    /// The forked stream in the argument of `advance_to` must have been
    /// obtained by forking `self`. Attempting to advance to any other
    /// stream will cause a panic.
    fn advance_to(&self, fork: &Self);
}
|
||||
|
||||
impl<'a> Speculative for ParseBuffer<'a> {
|
||||
fn advance_to(&self, fork: &Self) {
|
||||
if !crate::buffer::same_scope(self.cursor(), fork.cursor()) {
|
||||
panic!("fork was not derived from the advancing parse stream");
|
||||
}
|
||||
|
||||
let (self_unexp, self_sp) = inner_unexpected(self);
|
||||
let (fork_unexp, fork_sp) = inner_unexpected(fork);
|
||||
if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
|
||||
match (fork_sp, self_sp) {
|
||||
// Unexpected set on the fork, but not on `self`, copy it over.
|
||||
(Some((span, delimiter)), None) => {
|
||||
self_unexp.set(Unexpected::Some(span, delimiter));
|
||||
}
|
||||
// Unexpected unset. Use chain to propagate errors from fork.
|
||||
(None, None) => {
|
||||
fork_unexp.set(Unexpected::Chain(self_unexp));
|
||||
|
||||
// Ensure toplevel 'unexpected' tokens from the fork don't
|
||||
// propagate up the chain by replacing the root `unexpected`
|
||||
// pointer, only 'unexpected' tokens from existing group
|
||||
// parsers should propagate.
|
||||
fork.unexpected
|
||||
.set(Some(Rc::new(Cell::new(Unexpected::None))));
|
||||
}
|
||||
// Unexpected has been set on `self`. No changes needed.
|
||||
(_, Some(_)) => {}
|
||||
}
|
||||
}
|
||||
|
||||
// See comment on `cell` in the struct definition.
|
||||
self.cell
|
||||
.set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) });
|
||||
}
|
||||
}
|
||||
|
||||
/// Extensions to the `ParseStream` API to support manipulating invisible
|
||||
/// delimiters the same as if they were visible.
|
||||
pub trait AnyDelimiter {
|
||||
/// Returns the delimiter, the span of the delimiter token, and the nested
|
||||
/// contents for further parsing.
|
||||
fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)>;
|
||||
}
|
||||
|
||||
impl<'a> AnyDelimiter for ParseBuffer<'a> {
|
||||
fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)> {
|
||||
self.step(|cursor| {
|
||||
if let Some((content, delimiter, span, rest)) = cursor.any_group() {
|
||||
let scope = span.close();
|
||||
let nested = crate::parse::advance_step_cursor(cursor, content);
|
||||
let unexpected = crate::parse::get_unexpected(self);
|
||||
let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
|
||||
Ok(((delimiter, span, content), rest))
|
||||
} else {
|
||||
Err(cursor.error("expected any delimiter"))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use std::iter;
|
||||
use std::mem::ManuallyDrop;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::option;
|
||||
use std::slice;
|
||||
|
||||
// Wrapper that prevents the destructor of its contents from ever running.
// Construction is restricted to types implementing `TrivialDrop`, i.e. types
// for which skipping drop does not leak anything (see `test_needs_drop`).
#[repr(transparent)]
pub(crate) struct NoDrop<T: ?Sized>(ManuallyDrop<T>);

impl<T> NoDrop<T> {
    pub(crate) fn new(value: T) -> Self
    where
        T: TrivialDrop,
    {
        NoDrop(ManuallyDrop::new(value))
    }
}

impl<T: ?Sized> Deref for NoDrop<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T: ?Sized> DerefMut for NoDrop<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

// Marker trait for types whose drop is a no-op.
pub(crate) trait TrivialDrop {}

impl<T> TrivialDrop for iter::Empty<T> {}
impl<T> TrivialDrop for slice::Iter<'_, T> {}
impl<T> TrivialDrop for slice::IterMut<'_, T> {}
impl<T> TrivialDrop for option::IntoIter<&T> {}
impl<T> TrivialDrop for option::IntoIter<&mut T> {}
|
||||
|
||||
#[test]
fn test_needs_drop() {
    use std::mem::needs_drop;

    // A type with a nontrivial destructor, to make the checks meaningful.
    struct HasDrop;

    impl Drop for HasDrop {
        fn drop(&mut self) {}
    }

    assert!(needs_drop::<HasDrop>());

    // Each type given a handwritten TrivialDrop impl above must be
    // droppable without running any destructor code, even when its type
    // parameter has one.
    assert!(!needs_drop::<iter::Empty<HasDrop>>());
    assert!(!needs_drop::<slice::Iter<HasDrop>>());
    assert!(!needs_drop::<slice::IterMut<HasDrop>>());
    assert!(!needs_drop::<option::IntoIter<&HasDrop>>());
    assert!(!needs_drop::<option::IntoIter<&mut HasDrop>>());
}
|
||||
|
|
@ -0,0 +1,469 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::buffer::Cursor;
|
||||
use crate::thread::ThreadBound;
|
||||
use proc_macro2::{
|
||||
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
|
||||
};
|
||||
#[cfg(feature = "printing")]
|
||||
use quote::ToTokens;
|
||||
use std::fmt::{self, Debug, Display};
|
||||
use std::slice;
|
||||
use std::vec;
|
||||
|
||||
/// The result of a Syn parser.
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
/// Error returned when a Syn parser cannot parse the input tokens.
|
||||
///
|
||||
/// # Error reporting in proc macros
|
||||
///
|
||||
/// The correct way to report errors back to the compiler from a procedural
|
||||
/// macro is by emitting an appropriately spanned invocation of
|
||||
/// [`compile_error!`] in the generated code. This produces a better diagnostic
|
||||
/// message than simply panicking the macro.
|
||||
///
|
||||
/// [`compile_error!`]: std::compile_error!
|
||||
///
|
||||
/// When parsing macro input, the [`parse_macro_input!`] macro handles the
|
||||
/// conversion to `compile_error!` automatically.
|
||||
///
|
||||
/// [`parse_macro_input!`]: crate::parse_macro_input!
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::parse::{Parse, ParseStream, Result};
|
||||
/// use syn::{parse_macro_input, ItemFn};
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_attribute]
|
||||
/// # };
|
||||
/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
/// let args = parse_macro_input!(args as MyAttrArgs);
|
||||
/// let input = parse_macro_input!(input as ItemFn);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
///
|
||||
/// struct MyAttrArgs {
|
||||
/// # _k: [(); { stringify! {
|
||||
/// ...
|
||||
/// # }; 0 }]
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for MyAttrArgs {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// # stringify! {
|
||||
/// ...
|
||||
/// # };
|
||||
/// # unimplemented!()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// For errors that arise later than the initial parsing stage, the
|
||||
/// [`.to_compile_error()`] or [`.into_compile_error()`] methods can be used to
|
||||
/// perform an explicit conversion to `compile_error!`.
|
||||
///
|
||||
/// [`.to_compile_error()`]: Error::to_compile_error
|
||||
/// [`.into_compile_error()`]: Error::into_compile_error
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// # use proc_macro::TokenStream;
|
||||
/// # use syn::{parse_macro_input, DeriveInput};
|
||||
/// #
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_derive(MyDerive)]
|
||||
/// # };
|
||||
/// pub fn my_derive(input: TokenStream) -> TokenStream {
|
||||
/// let input = parse_macro_input!(input as DeriveInput);
|
||||
///
|
||||
/// // fn(DeriveInput) -> syn::Result<proc_macro2::TokenStream>
|
||||
/// expand::my_derive(input)
|
||||
/// .unwrap_or_else(syn::Error::into_compile_error)
|
||||
/// .into()
|
||||
/// }
|
||||
/// #
|
||||
/// # mod expand {
|
||||
/// # use proc_macro2::TokenStream;
|
||||
/// # use syn::{DeriveInput, Result};
|
||||
/// #
|
||||
/// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
|
||||
/// # unimplemented!()
|
||||
/// # }
|
||||
/// # }
|
||||
/// ```
|
||||
pub struct Error {
|
||||
messages: Vec<ErrorMessage>,
|
||||
}
|
||||
|
||||
struct ErrorMessage {
|
||||
// Span is implemented as an index into a thread-local interner to keep the
|
||||
// size small. It is not safe to access from a different thread. We want
|
||||
// errors to be Send and Sync to play nicely with ecosystem crates for error
|
||||
// handling, so pin the span we're given to its original thread and assume
|
||||
// it is Span::call_site if accessed from any other thread.
|
||||
span: ThreadBound<SpanRange>,
|
||||
message: String,
|
||||
}
|
||||
|
||||
// Cannot use std::ops::Range<Span> because that does not implement Copy,
|
||||
// whereas ThreadBound<T> requires a Copy impl as a way to ensure no Drop impls
|
||||
// are involved.
|
||||
struct SpanRange {
|
||||
start: Span,
|
||||
end: Span,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
struct _Test
|
||||
where
|
||||
Error: Send + Sync;
|
||||
|
||||
impl Error {
|
||||
/// Usually the [`ParseStream::error`] method will be used instead, which
|
||||
/// automatically uses the correct span from the current position of the
|
||||
/// parse stream.
|
||||
///
|
||||
/// Use `Error::new` when the error needs to be triggered on some span other
|
||||
/// than where the parse stream is currently positioned.
|
||||
///
|
||||
/// [`ParseStream::error`]: crate::parse::ParseBuffer::error
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{Error, Ident, LitStr, Result, Token};
|
||||
/// use syn::parse::ParseStream;
|
||||
///
|
||||
/// // Parses input that looks like `name = "string"` where the key must be
|
||||
/// // the identifier `name` and the value may be any string literal.
|
||||
/// // Returns the string literal.
|
||||
/// fn parse_name(input: ParseStream) -> Result<LitStr> {
|
||||
/// let name_token: Ident = input.parse()?;
|
||||
/// if name_token != "name" {
|
||||
/// // Trigger an error not on the current position of the stream,
|
||||
/// // but on the position of the unexpected identifier.
|
||||
/// return Err(Error::new(name_token.span(), "expected `name`"));
|
||||
/// }
|
||||
/// input.parse::<Token![=]>()?;
|
||||
/// let s: LitStr = input.parse()?;
|
||||
/// Ok(s)
|
||||
/// }
|
||||
/// ```
|
||||
pub fn new<T: Display>(span: Span, message: T) -> Self {
|
||||
return new(span, message.to_string());
|
||||
|
||||
fn new(span: Span, message: String) -> Error {
|
||||
Error {
|
||||
messages: vec![ErrorMessage {
|
||||
span: ThreadBound::new(SpanRange {
|
||||
start: span,
|
||||
end: span,
|
||||
}),
|
||||
message,
|
||||
}],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates an error with the specified message spanning the given syntax
|
||||
/// tree node.
|
||||
///
|
||||
/// Unlike the `Error::new` constructor, this constructor takes an argument
|
||||
/// `tokens` which is a syntax tree node. This allows the resulting `Error`
|
||||
/// to attempt to span all tokens inside of `tokens`. While you would
|
||||
/// typically be able to use the `Spanned` trait with the above `Error::new`
|
||||
/// constructor, implementation limitations today mean that
|
||||
/// `Error::new_spanned` may provide a higher-quality error message on
|
||||
/// stable Rust.
|
||||
///
|
||||
/// When in doubt it's recommended to stick to `Error::new` (or
|
||||
/// `ParseStream::error`)!
|
||||
#[cfg(feature = "printing")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
|
||||
pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
|
||||
return new_spanned(tokens.into_token_stream(), message.to_string());
|
||||
|
||||
fn new_spanned(tokens: TokenStream, message: String) -> Error {
|
||||
let mut iter = tokens.into_iter();
|
||||
let start = iter.next().map_or_else(Span::call_site, |t| t.span());
|
||||
let end = iter.last().map_or(start, |t| t.span());
|
||||
Error {
|
||||
messages: vec![ErrorMessage {
|
||||
span: ThreadBound::new(SpanRange { start, end }),
|
||||
message,
|
||||
}],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The source location of the error.
|
||||
///
|
||||
/// Spans are not thread-safe so this function returns `Span::call_site()`
|
||||
/// if called from a different thread than the one on which the `Error` was
|
||||
/// originally created.
|
||||
pub fn span(&self) -> Span {
|
||||
let SpanRange { start, end } = match self.messages[0].span.get() {
|
||||
Some(span) => *span,
|
||||
None => return Span::call_site(),
|
||||
};
|
||||
start.join(end).unwrap_or(start)
|
||||
}
|
||||
|
||||
/// Render the error as an invocation of [`compile_error!`].
|
||||
///
|
||||
/// The [`parse_macro_input!`] macro provides a convenient way to invoke
|
||||
/// this method correctly in a procedural macro.
|
||||
///
|
||||
/// [`compile_error!`]: std::compile_error!
|
||||
/// [`parse_macro_input!`]: crate::parse_macro_input!
|
||||
pub fn to_compile_error(&self) -> TokenStream {
|
||||
self.messages
|
||||
.iter()
|
||||
.map(ErrorMessage::to_compile_error)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Render the error as an invocation of [`compile_error!`].
|
||||
///
|
||||
/// [`compile_error!`]: std::compile_error!
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::{parse_macro_input, DeriveInput, Error};
|
||||
///
|
||||
/// # const _: &str = stringify! {
|
||||
/// #[proc_macro_derive(MyTrait)]
|
||||
/// # };
|
||||
/// pub fn derive_my_trait(input: TokenStream) -> TokenStream {
|
||||
/// let input = parse_macro_input!(input as DeriveInput);
|
||||
/// my_trait::expand(input)
|
||||
/// .unwrap_or_else(Error::into_compile_error)
|
||||
/// .into()
|
||||
/// }
|
||||
///
|
||||
/// mod my_trait {
|
||||
/// use proc_macro2::TokenStream;
|
||||
/// use syn::{DeriveInput, Result};
|
||||
///
|
||||
/// pub(crate) fn expand(input: DeriveInput) -> Result<TokenStream> {
|
||||
/// /* ... */
|
||||
/// # unimplemented!()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub fn into_compile_error(self) -> TokenStream {
|
||||
self.to_compile_error()
|
||||
}
|
||||
|
||||
/// Add another error message to self such that when `to_compile_error()` is
|
||||
/// called, both errors will be emitted together.
|
||||
pub fn combine(&mut self, another: Error) {
|
||||
self.messages.extend(another.messages);
|
||||
}
|
||||
}
|
||||
|
||||
impl ErrorMessage {
|
||||
fn to_compile_error(&self) -> TokenStream {
|
||||
let (start, end) = match self.span.get() {
|
||||
Some(range) => (range.start, range.end),
|
||||
None => (Span::call_site(), Span::call_site()),
|
||||
};
|
||||
|
||||
// ::core::compile_error!($message)
|
||||
TokenStream::from_iter([
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new(':', Spacing::Joint);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new(':', Spacing::Alone);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Ident(Ident::new("core", start)),
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new(':', Spacing::Joint);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new(':', Spacing::Alone);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Ident(Ident::new("compile_error", start)),
|
||||
TokenTree::Punct({
|
||||
let mut punct = Punct::new('!', Spacing::Alone);
|
||||
punct.set_span(start);
|
||||
punct
|
||||
}),
|
||||
TokenTree::Group({
|
||||
let mut group = Group::new(Delimiter::Brace, {
|
||||
TokenStream::from_iter([TokenTree::Literal({
|
||||
let mut string = Literal::string(&self.message);
|
||||
string.set_span(end);
|
||||
string
|
||||
})])
|
||||
});
|
||||
group.set_span(end);
|
||||
group
|
||||
}),
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
// Error at the position of `cursor`, falling back to `scope` with a
// clarifying prefix when there is no token left to point at.
#[cfg(feature = "parsing")]
pub(crate) fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
    if cursor.eof() {
        Error::new(scope, format!("unexpected end of input, {}", message))
    } else {
        let span = crate::buffer::open_span_of_group(cursor);
        Error::new(span, message)
    }
}

// Error spanning an explicit start..end range of tokens.
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
pub(crate) fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
    // Non-generic inner function, mirroring Error::new.
    return make(start, end, message.to_string());

    fn make(start: Span, end: Span, message: String) -> Error {
        Error {
            messages: vec![ErrorMessage {
                span: ThreadBound::new(SpanRange { start, end }),
                message,
            }],
        }
    }
}
|
||||
|
||||
impl Debug for Error {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
if self.messages.len() == 1 {
|
||||
formatter
|
||||
.debug_tuple("Error")
|
||||
.field(&self.messages[0])
|
||||
.finish()
|
||||
} else {
|
||||
formatter
|
||||
.debug_tuple("Error")
|
||||
.field(&self.messages)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for ErrorMessage {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.message, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Error {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str(&self.messages[0].message)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Error {
|
||||
fn clone(&self) -> Self {
|
||||
Error {
|
||||
messages: self.messages.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for ErrorMessage {
|
||||
fn clone(&self) -> Self {
|
||||
ErrorMessage {
|
||||
span: self.span,
|
||||
message: self.message.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for SpanRange {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl Copy for SpanRange {}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
impl From<LexError> for Error {
|
||||
fn from(err: LexError) -> Self {
|
||||
Error::new(err.span(), err)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Error {
|
||||
type Item = Error;
|
||||
type IntoIter = IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
IntoIter {
|
||||
messages: self.messages.into_iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IntoIter {
|
||||
messages: vec::IntoIter<ErrorMessage>,
|
||||
}
|
||||
|
||||
impl Iterator for IntoIter {
|
||||
type Item = Error;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
Some(Error {
|
||||
messages: vec![self.messages.next()?],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a Error {
|
||||
type Item = Error;
|
||||
type IntoIter = Iter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
Iter {
|
||||
messages: self.messages.iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Iter<'a> {
|
||||
messages: slice::Iter<'a, ErrorMessage>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Iter<'a> {
|
||||
type Item = Error;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
Some(Error {
|
||||
messages: vec![self.messages.next()?.clone()],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<Error> for Error {
|
||||
fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
|
||||
for err in iter {
|
||||
self.combine(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,75 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[doc(hidden)]
|
||||
pub use std::clone::Clone;
|
||||
#[doc(hidden)]
|
||||
pub use std::cmp::{Eq, PartialEq};
|
||||
#[doc(hidden)]
|
||||
pub use std::concat;
|
||||
#[doc(hidden)]
|
||||
pub use std::default::Default;
|
||||
#[doc(hidden)]
|
||||
pub use std::fmt::Debug;
|
||||
#[doc(hidden)]
|
||||
pub use std::hash::{Hash, Hasher};
|
||||
#[doc(hidden)]
|
||||
pub use std::marker::Copy;
|
||||
#[doc(hidden)]
|
||||
pub use std::option::Option::{None, Some};
|
||||
#[doc(hidden)]
|
||||
pub use std::result::Result::{Err, Ok};
|
||||
#[doc(hidden)]
|
||||
pub use std::stringify;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub type Formatter<'a> = std::fmt::Formatter<'a>;
|
||||
#[doc(hidden)]
|
||||
pub type FmtResult = std::fmt::Result;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub type bool = std::primitive::bool;
|
||||
#[doc(hidden)]
|
||||
pub type str = std::primitive::str;
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[doc(hidden)]
|
||||
pub use quote;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub type Span = proc_macro2::Span;
|
||||
#[doc(hidden)]
|
||||
pub type TokenStream2 = proc_macro2::TokenStream;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[doc(hidden)]
|
||||
pub use crate::group::{parse_braces, parse_brackets, parse_parens};
|
||||
|
||||
#[doc(hidden)]
|
||||
pub use crate::span::IntoSpans;
|
||||
|
||||
#[cfg(all(feature = "parsing", feature = "printing"))]
|
||||
#[doc(hidden)]
|
||||
pub use crate::parse_quote::parse as parse_quote;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[doc(hidden)]
|
||||
pub use crate::token::parsing::{peek_punct, punct as parse_punct};
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[doc(hidden)]
|
||||
pub use crate::token::printing::punct as print_punct;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[doc(hidden)]
|
||||
pub use crate::token::private::CustomToken;
|
||||
|
||||
#[cfg(feature = "proc-macro")]
|
||||
#[doc(hidden)]
|
||||
pub type TokenStream = proc_macro::TokenStream;
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[doc(hidden)]
|
||||
pub use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
#[doc(hidden)]
|
||||
pub struct private(pub(crate) ());
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,138 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
//! Extension traits to provide parsing methods on foreign types.
|
||||
|
||||
use crate::buffer::Cursor;
|
||||
use crate::error::Result;
|
||||
use crate::parse::ParseStream;
|
||||
use crate::parse::Peek;
|
||||
use crate::sealed::lookahead;
|
||||
use crate::token::CustomToken;
|
||||
use proc_macro2::Ident;
|
||||
|
||||
/// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro.
///
/// This trait is sealed and cannot be implemented for types outside of Syn. It
/// is implemented only for `proc_macro2::Ident`.
pub trait IdentExt: Sized + private::Sealed {
    /// Parses any identifier including keywords.
    ///
    /// This is useful when parsing macro input which allows Rust keywords as
    /// identifiers.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{Error, Ident, Result, Token};
    /// use syn::ext::IdentExt;
    /// use syn::parse::ParseStream;
    ///
    /// mod kw {
    ///     syn::custom_keyword!(name);
    /// }
    ///
    /// // Parses input that looks like `name = NAME` where `NAME` can be
    /// // any identifier.
    /// //
    /// // Examples:
    /// //
    /// //     name = anything
    /// //     name = impl
    /// fn parse_dsl(input: ParseStream) -> Result<Ident> {
    ///     input.parse::<kw::name>()?;
    ///     input.parse::<Token![=]>()?;
    ///     let name = input.call(Ident::parse_any)?;
    ///     Ok(name)
    /// }
    /// ```
    fn parse_any(input: ParseStream) -> Result<Self>;

    /// Peeks any identifier including keywords. Usage:
    /// `input.peek(Ident::peek_any)`
    ///
    /// This is different from `input.peek(Ident)` which only returns true in
    /// the case of an ident which is not a Rust keyword.
    #[allow(non_upper_case_globals)]
    // A const of a private marker type rather than a function: this lets
    // `Ident::peek_any` be passed to `peek` like the token marker types,
    // via the `Peek` impl for `private::PeekFn` in this file.
    const peek_any: private::PeekFn = private::PeekFn;

    /// Strips the raw marker `r#`, if any, from the beginning of an ident.
    ///
    /// - unraw(`x`) = `x`
    /// - unraw(`move`) = `move`
    /// - unraw(`r#move`) = `move`
    ///
    /// # Example
    ///
    /// In the case of interop with other languages like Python that have a
    /// different set of keywords than Rust, we might come across macro input
    /// that involves raw identifiers to refer to ordinary variables in the
    /// other language with a name that happens to be a Rust keyword.
    ///
    /// The function below appends an identifier from the caller's input onto a
    /// fixed prefix. Without using `unraw()`, this would tend to produce
    /// invalid identifiers like `__pyo3_get_r#move`.
    ///
    /// ```
    /// use proc_macro2::Span;
    /// use syn::Ident;
    /// use syn::ext::IdentExt;
    ///
    /// fn ident_for_getter(variable: &Ident) -> Ident {
    ///     let getter = format!("__pyo3_get_{}", variable.unraw());
    ///     Ident::new(&getter, Span::call_site())
    /// }
    /// ```
    fn unraw(&self) -> Ident;
}
|
||||
|
||||
impl IdentExt for Ident {
|
||||
fn parse_any(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| match cursor.ident() {
|
||||
Some((ident, rest)) => Ok((ident, rest)),
|
||||
None => Err(cursor.error("expected ident")),
|
||||
})
|
||||
}
|
||||
|
||||
fn unraw(&self) -> Ident {
|
||||
let string = self.to_string();
|
||||
if let Some(string) = string.strip_prefix("r#") {
|
||||
Ident::new(string, self.span())
|
||||
} else {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Makes `Ident::peek_any` (a `private::PeekFn` value) usable as the argument
// of `ParseStream::peek`; peeking with it matches `IdentAny`, i.e. any
// identifier including keywords (see the `CustomToken` impl below).
impl Peek for private::PeekFn {
    type Token = private::IdentAny;
}
|
||||
|
||||
impl CustomToken for private::IdentAny {
|
||||
fn peek(cursor: Cursor) -> bool {
|
||||
cursor.ident().is_some()
|
||||
}
|
||||
|
||||
fn display() -> &'static str {
|
||||
"identifier"
|
||||
}
|
||||
}
|
||||
|
||||
impl lookahead::Sealed for private::PeekFn {}
|
||||
|
||||
mod private {
|
||||
use proc_macro2::Ident;
|
||||
|
||||
pub trait Sealed {}
|
||||
|
||||
impl Sealed for Ident {}
|
||||
|
||||
pub struct PeekFn;
|
||||
pub struct IdentAny;
|
||||
|
||||
impl Copy for PeekFn {}
|
||||
impl Clone for PeekFn {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,127 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::attr::Attribute;
|
||||
use crate::item::Item;
|
||||
|
||||
ast_struct! {
    /// A complete file of Rust source code.
    ///
    /// Typically `File` objects are created with [`parse_file`].
    ///
    /// [`parse_file`]: crate::parse_file
    ///
    /// # Example
    ///
    /// Parse a Rust source file into a `syn::File` and print out a debug
    /// representation of the syntax tree.
    ///
    /// ```
    /// use std::env;
    /// use std::fs;
    /// use std::process;
    ///
    /// fn main() {
    /// # }
    /// #
    /// # fn fake_main() {
    ///     let mut args = env::args();
    ///     let _ = args.next(); // executable name
    ///
    ///     let filename = match (args.next(), args.next()) {
    ///         (Some(filename), None) => filename,
    ///         _ => {
    ///             eprintln!("Usage: dump-syntax path/to/filename.rs");
    ///             process::exit(1);
    ///         }
    ///     };
    ///
    ///     let src = fs::read_to_string(&filename).expect("unable to read file");
    ///     let syntax = syn::parse_file(&src).expect("unable to parse file");
    ///
    ///     // Debug impl is available if Syn is built with "extra-traits" feature.
    ///     println!("{:#?}", syntax);
    /// }
    /// ```
    ///
    /// Running with its own source code as input, this program prints output
    /// that begins with:
    ///
    /// ```text
    /// File {
    ///     shebang: None,
    ///     attrs: [],
    ///     items: [
    ///         Use(
    ///             ItemUse {
    ///                 attrs: [],
    ///                 vis: Inherited,
    ///                 use_token: Use,
    ///                 leading_colon: None,
    ///                 tree: Path(
    ///                     UsePath {
    ///                         ident: Ident(
    ///                             std,
    ///                         ),
    ///                         colon2_token: Colon2,
    ///                         tree: Name(
    ///                             UseName {
    ///                                 ident: Ident(
    ///                                     env,
    ///                                 ),
    ///                             },
    ///                         ),
    ///                     },
    ///                 ),
    ///                 semi_token: Semi,
    ///             },
    ///         ),
    /// ...
    /// ```
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct File {
        // NOTE(review): the `Parse` impl in this file always leaves this
        // `None`; presumably the shebang line is stripped and recorded by
        // `parse_file` before token parsing — confirm against lib.rs.
        pub shebang: Option<String>,
        pub attrs: Vec<Attribute>,
        pub items: Vec<Item>,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use crate::attr::Attribute;
    use crate::error::Result;
    use crate::file::File;
    use crate::parse::{Parse, ParseStream};

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for File {
        /// Parse inner attributes, then items until the input is exhausted.
        /// `shebang` is always `None` here; shebang text is not part of the
        /// token stream being parsed.
        fn parse(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_inner)?;

            let mut items = Vec::new();
            while !input.is_empty() {
                items.push(input.parse()?);
            }

            Ok(File {
                shebang: None,
                attrs,
                items,
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use crate::attr::FilterAttrs;
    use crate::file::File;
    use proc_macro2::TokenStream;
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for File {
        /// Emit inner attributes followed by items. The `shebang` field is
        /// not printed — it has no token representation.
        fn to_tokens(&self, tokens: &mut TokenStream) {
            let File {
                shebang: _,
                attrs,
                items,
            } = self;
            tokens.append_all(attrs.inner());
            tokens.append_all(items);
        }
    }
}
|
||||
|
|
@ -0,0 +1,775 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::classify;
|
||||
use crate::expr::Expr;
|
||||
#[cfg(feature = "full")]
|
||||
use crate::expr::{
|
||||
ExprBreak, ExprRange, ExprRawAddr, ExprReference, ExprReturn, ExprUnary, ExprYield,
|
||||
};
|
||||
use crate::precedence::Precedence;
|
||||
#[cfg(feature = "full")]
|
||||
use crate::ty::ReturnType;
|
||||
|
||||
pub(crate) struct FixupContext {
    // Precedence of the operator immediately to the left of the expression
    // being printed. NOTE(review): inferred from
    // `rightmost_subexpression_fixup` storing its `precedence` argument
    // here — confirm.
    #[cfg(feature = "full")]
    previous_operator: Precedence,
    // Precedence of the operator immediately to the right; set by
    // `leftmost_subexpression_with_operator`.
    #[cfg(feature = "full")]
    next_operator: Precedence,

    // Print expression such that it can be parsed back as a statement
    // consisting of the original expression.
    //
    // The effect of this is for binary operators in statement position to set
    // `leftmost_subexpression_in_stmt` when printing their left-hand operand.
    //
    //     (match x {}) - 1;  // match needs parens when LHS of binary operator
    //
    //     match x {};  // not when its own statement
    //
    #[cfg(feature = "full")]
    stmt: bool,

    // This is the difference between:
    //
    //     (match x {}) - 1;  // subexpression needs parens
    //
    //     let _ = match x {} - 1;  // no parens
    //
    // There are 3 distinguishable contexts in which `print_expr` might be
    // called with the expression `$match` as its argument, where `$match`
    // represents an expression of kind `ExprKind::Match`:
    //
    //   - stmt=false leftmost_subexpression_in_stmt=false
    //
    //     Example: `let _ = $match - 1;`
    //
    //     No parentheses required.
    //
    //   - stmt=false leftmost_subexpression_in_stmt=true
    //
    //     Example: `$match - 1;`
    //
    //     Must parenthesize `($match)`, otherwise parsing back the output as a
    //     statement would terminate the statement after the closing brace of
    //     the match, parsing `-1;` as a separate statement.
    //
    //   - stmt=true leftmost_subexpression_in_stmt=false
    //
    //     Example: `$match;`
    //
    //     No parentheses required.
    #[cfg(feature = "full")]
    leftmost_subexpression_in_stmt: bool,

    // Print expression such that it can be parsed as a match arm.
    //
    // This is almost equivalent to `stmt`, but the grammar diverges a tiny bit
    // between statements and match arms when it comes to braced macro calls.
    // Macro calls with brace delimiter terminate a statement without a
    // semicolon, but do not terminate a match-arm without comma.
    //
    //     m! {} - 1;  // two statements: a macro call followed by -1 literal
    //
    //     match () {
    //         _ => m! {} - 1,  // binary subtraction operator
    //     }
    //
    #[cfg(feature = "full")]
    match_arm: bool,

    // This is almost equivalent to `leftmost_subexpression_in_stmt`, other than
    // for braced macro calls.
    //
    // If we have `m! {} - 1` as an expression, the leftmost subexpression
    // `m! {}` will need to be parenthesized in the statement case but not the
    // match-arm case.
    //
    //     (m! {}) - 1;  // subexpression needs parens
    //
    //     match () {
    //         _ => m! {} - 1,  // no parens
    //     }
    //
    #[cfg(feature = "full")]
    leftmost_subexpression_in_match_arm: bool,

    // This is the difference between:
    //
    //     if let _ = (Struct {}) {}  // needs parens
    //
    //     match () {
    //         () if let _ = Struct {} => {}  // no parens
    //     }
    //
    #[cfg(feature = "full")]
    condition: bool,

    // This is the difference between:
    //
    //     if break Struct {} == (break) {}  // needs parens
    //
    //     if break break == Struct {} {}  // no parens
    //
    #[cfg(feature = "full")]
    rightmost_subexpression_in_condition: bool,

    // This is the difference between:
    //
    //     if break ({ x }).field + 1 {}  // needs parens
    //
    //     if break 1 + { x }.field {}  // no parens
    //
    #[cfg(feature = "full")]
    leftmost_subexpression_in_optional_operand: bool,

    // This is the difference between:
    //
    //     let _ = (return) - 1;  // without paren, this would return -1
    //
    //     let _ = return + 1;  // no paren because '+' cannot begin expr
    //
    #[cfg(feature = "full")]
    next_operator_can_begin_expr: bool,

    // This is the difference between:
    //
    //     let _ = 1 + return 1;  // no parens if rightmost subexpression
    //
    //     let _ = 1 + (return 1) + 1;  // needs parens
    //
    #[cfg(feature = "full")]
    next_operator_can_continue_expr: bool,

    // This is the difference between:
    //
    //     let _ = x as u8 + T;
    //
    //     let _ = (x as u8) < T;
    //
    // Without parens, the latter would want to parse `u8<T...` as a type.
    next_operator_can_begin_generics: bool,
}
|
||||
|
||||
impl FixupContext {
    /// The default amount of fixing is minimal fixing. Fixups should be turned
    /// on in a targeted fashion where needed.
    pub const NONE: Self = FixupContext {
        #[cfg(feature = "full")]
        previous_operator: Precedence::MIN,
        #[cfg(feature = "full")]
        next_operator: Precedence::MIN,
        #[cfg(feature = "full")]
        stmt: false,
        #[cfg(feature = "full")]
        leftmost_subexpression_in_stmt: false,
        #[cfg(feature = "full")]
        match_arm: false,
        #[cfg(feature = "full")]
        leftmost_subexpression_in_match_arm: false,
        #[cfg(feature = "full")]
        condition: false,
        #[cfg(feature = "full")]
        rightmost_subexpression_in_condition: false,
        #[cfg(feature = "full")]
        leftmost_subexpression_in_optional_operand: false,
        #[cfg(feature = "full")]
        next_operator_can_begin_expr: false,
        #[cfg(feature = "full")]
        next_operator_can_continue_expr: false,
        next_operator_can_begin_generics: false,
    };

    /// Create the initial fixup for printing an expression in statement
    /// position.
    #[cfg(feature = "full")]
    pub fn new_stmt() -> Self {
        FixupContext {
            stmt: true,
            ..FixupContext::NONE
        }
    }

    /// Create the initial fixup for printing an expression as the right-hand
    /// side of a match arm.
    #[cfg(feature = "full")]
    pub fn new_match_arm() -> Self {
        FixupContext {
            match_arm: true,
            ..FixupContext::NONE
        }
    }

    /// Create the initial fixup for printing an expression as the "condition"
    /// of an `if` or `while`. There are a few other positions which are
    /// grammatically equivalent and also use this, such as the iterator
    /// expression in `for` and the scrutinee in `match`.
    #[cfg(feature = "full")]
    pub fn new_condition() -> Self {
        FixupContext {
            condition: true,
            rightmost_subexpression_in_condition: true,
            ..FixupContext::NONE
        }
    }

    /// Transform this fixup into the one that should apply when printing the
    /// leftmost subexpression of the current expression.
    ///
    /// The leftmost subexpression is any subexpression that has the same first
    /// token as the current expression, but has a different last token.
    ///
    /// For example in `$a + $b` and `$a.method()`, the subexpression `$a` is a
    /// leftmost subexpression.
    ///
    /// Not every expression has a leftmost subexpression. For example neither
    /// `-$a` nor `[$a]` have one.
    pub fn leftmost_subexpression_with_operator(
        self,
        expr: &Expr,
        #[cfg(feature = "full")] next_operator_can_begin_expr: bool,
        next_operator_can_begin_generics: bool,
        #[cfg(feature = "full")] precedence: Precedence,
    ) -> (Precedence, Self) {
        let fixup = FixupContext {
            #[cfg(feature = "full")]
            next_operator: precedence,
            #[cfg(feature = "full")]
            stmt: false,
            #[cfg(feature = "full")]
            leftmost_subexpression_in_stmt: self.stmt || self.leftmost_subexpression_in_stmt,
            #[cfg(feature = "full")]
            match_arm: false,
            #[cfg(feature = "full")]
            leftmost_subexpression_in_match_arm: self.match_arm
                || self.leftmost_subexpression_in_match_arm,
            #[cfg(feature = "full")]
            rightmost_subexpression_in_condition: false,
            #[cfg(feature = "full")]
            next_operator_can_begin_expr,
            #[cfg(feature = "full")]
            next_operator_can_continue_expr: true,
            next_operator_can_begin_generics,
            ..self
        };

        (fixup.leftmost_subexpression_precedence(expr), fixup)
    }

    /// Transform this fixup into the one that should apply when printing a
    /// leftmost subexpression followed by a `.` or `?` token, which confer
    /// different statement boundary rules compared to other leftmost
    /// subexpressions.
    pub fn leftmost_subexpression_with_dot(self, expr: &Expr) -> (Precedence, Self) {
        let fixup = FixupContext {
            #[cfg(feature = "full")]
            next_operator: Precedence::Unambiguous,
            #[cfg(feature = "full")]
            stmt: self.stmt || self.leftmost_subexpression_in_stmt,
            #[cfg(feature = "full")]
            leftmost_subexpression_in_stmt: false,
            #[cfg(feature = "full")]
            match_arm: self.match_arm || self.leftmost_subexpression_in_match_arm,
            #[cfg(feature = "full")]
            leftmost_subexpression_in_match_arm: false,
            #[cfg(feature = "full")]
            rightmost_subexpression_in_condition: false,
            #[cfg(feature = "full")]
            next_operator_can_begin_expr: false,
            #[cfg(feature = "full")]
            next_operator_can_continue_expr: true,
            next_operator_can_begin_generics: false,
            ..self
        };

        (fixup.leftmost_subexpression_precedence(expr), fixup)
    }

    // Effective precedence of `expr` in leftmost-subexpression position.
    // NOTE(review): the scan_right/scan_left probe appears to detect cases
    // where no parenthesization ambiguity is possible (returning
    // `Unambiguous`) — confirm against upstream commentary.
    fn leftmost_subexpression_precedence(self, expr: &Expr) -> Precedence {
        #[cfg(feature = "full")]
        if !self.next_operator_can_begin_expr || self.next_operator == Precedence::Range {
            if let Scan::Bailout = scan_right(expr, self, Precedence::MIN, 0, 0) {
                if scan_left(expr, self) {
                    return Precedence::Unambiguous;
                }
            }
        }

        self.precedence(expr)
    }

    /// Transform this fixup into the one that should apply when printing the
    /// rightmost subexpression of the current expression.
    ///
    /// The rightmost subexpression is any subexpression that has a different
    /// first token than the current expression, but has the same last token.
    ///
    /// For example in `$a + $b` and `-$b`, the subexpression `$b` is a
    /// rightmost subexpression.
    ///
    /// Not every expression has a rightmost subexpression. For example neither
    /// `[$b]` nor `$a.f($b)` have one.
    pub fn rightmost_subexpression(
        self,
        expr: &Expr,
        #[cfg(feature = "full")] precedence: Precedence,
    ) -> (Precedence, Self) {
        let fixup = self.rightmost_subexpression_fixup(
            #[cfg(feature = "full")]
            false,
            #[cfg(feature = "full")]
            false,
            #[cfg(feature = "full")]
            precedence,
        );
        (fixup.rightmost_subexpression_precedence(expr), fixup)
    }

    // Like `rightmost_subexpression` but producing only the child context:
    // statement/match-arm flags are cleared (a rightmost subexpression never
    // starts the statement), and `condition`-related flags are adjusted per
    // the caller's `reset_allow_struct` / `optional_operand` arguments.
    pub fn rightmost_subexpression_fixup(
        self,
        #[cfg(feature = "full")] reset_allow_struct: bool,
        #[cfg(feature = "full")] optional_operand: bool,
        #[cfg(feature = "full")] precedence: Precedence,
    ) -> Self {
        FixupContext {
            #[cfg(feature = "full")]
            previous_operator: precedence,
            #[cfg(feature = "full")]
            stmt: false,
            #[cfg(feature = "full")]
            leftmost_subexpression_in_stmt: false,
            #[cfg(feature = "full")]
            match_arm: false,
            #[cfg(feature = "full")]
            leftmost_subexpression_in_match_arm: false,
            #[cfg(feature = "full")]
            condition: self.condition && !reset_allow_struct,
            #[cfg(feature = "full")]
            leftmost_subexpression_in_optional_operand: self.condition && optional_operand,
            ..self
        }
    }

    // Effective precedence of `expr` in rightmost-subexpression position.
    // NOTE(review): the probe appears to detect when `expr` must be treated
    // as tightly as a prefix operand to avoid the following operator being
    // mis-associated — confirm against upstream commentary.
    pub fn rightmost_subexpression_precedence(self, expr: &Expr) -> Precedence {
        let default_prec = self.precedence(expr);

        #[cfg(feature = "full")]
        if match self.previous_operator {
            Precedence::Assign | Precedence::Let | Precedence::Prefix => {
                default_prec < self.previous_operator
            }
            _ => default_prec <= self.previous_operator,
        } && match self.next_operator {
            Precedence::Range | Precedence::Or | Precedence::And => true,
            _ => !self.next_operator_can_begin_expr,
        } {
            if let Scan::Bailout | Scan::Fail = scan_right(expr, self, self.previous_operator, 1, 0)
            {
                if scan_left(expr, self) {
                    return Precedence::Prefix;
                }
            }
        }

        default_prec
    }

    /// Determine whether parentheses are needed around the given expression to
    /// head off the early termination of a statement or condition.
    #[cfg(feature = "full")]
    pub fn parenthesize(self, expr: &Expr) -> bool {
        (self.leftmost_subexpression_in_stmt && !classify::requires_semi_to_be_stmt(expr))
            || ((self.stmt || self.leftmost_subexpression_in_stmt) && matches!(expr, Expr::Let(_)))
            || (self.leftmost_subexpression_in_match_arm
                && !classify::requires_comma_to_be_match_arm(expr))
            || (self.condition && matches!(expr, Expr::Struct(_)))
            || (self.rightmost_subexpression_in_condition
                && matches!(
                    expr,
                    Expr::Return(ExprReturn { expr: None, .. })
                        | Expr::Yield(ExprYield { expr: None, .. })
                ))
            || (self.rightmost_subexpression_in_condition
                && !self.condition
                && matches!(
                    expr,
                    Expr::Break(ExprBreak { expr: None, .. })
                        | Expr::Path(_)
                        | Expr::Range(ExprRange { end: None, .. })
                ))
            || (self.leftmost_subexpression_in_optional_operand
                && matches!(expr, Expr::Block(expr) if expr.attrs.is_empty() && expr.label.is_none()))
    }

    /// Determines the effective precedence of a subexpression. Some expressions
    /// have higher or lower precedence when adjacent to particular operators.
    fn precedence(self, expr: &Expr) -> Precedence {
        #[cfg(feature = "full")]
        if self.next_operator_can_begin_expr {
            // Decrease precedence of value-less jumps when followed by an
            // operator that would otherwise get interpreted as beginning a
            // value for the jump.
            if let Expr::Break(ExprBreak { expr: None, .. })
            | Expr::Return(ExprReturn { expr: None, .. })
            | Expr::Yield(ExprYield { expr: None, .. }) = expr
            {
                return Precedence::Jump;
            }
        }

        #[cfg(feature = "full")]
        if !self.next_operator_can_continue_expr {
            match expr {
                // Increase precedence of expressions that extend to the end of
                // current statement or group.
                Expr::Break(_)
                | Expr::Closure(_)
                | Expr::Let(_)
                | Expr::Return(_)
                | Expr::Yield(_) => {
                    return Precedence::Prefix;
                }
                Expr::Range(e) if e.start.is_none() => return Precedence::Prefix,
                _ => {}
            }
        }

        if self.next_operator_can_begin_generics {
            if let Expr::Cast(cast) = expr {
                if classify::trailing_unparameterized_path(&cast.ty) {
                    return Precedence::MIN;
                }
            }
        }

        Precedence::of(expr)
    }
}
|
||||
|
||||
// `FixupContext` is a small bundle of flags and precedences, so it is passed
// by value everywhere; manual impls rather than derives, matching the style
// used elsewhere in this file.
impl Copy for FixupContext {}

impl Clone for FixupContext {
    fn clone(&self) -> Self {
        *self
    }
}
|
||||
|
||||
// Result of scanning whether an unparenthesized print of an expression would
// interact correctly with the operator that follows it.
// NOTE(review): variant semantics inferred from their use in `scan_right` —
// `Consume` means the following operator is absorbed as intended, `Fail`
// means the parse would go wrong, `Bailout` is indeterminate at the current
// recursion depth. Confirm against upstream commentary.
#[cfg(feature = "full")]
enum Scan {
    Fail,
    Bailout,
    Consume,
}

#[cfg(feature = "full")]
impl Copy for Scan {}

#[cfg(feature = "full")]
impl Clone for Scan {
    fn clone(&self) -> Self {
        *self
    }
}

// Discriminant-based equality; valid because `Scan` is a fieldless enum.
#[cfg(feature = "full")]
impl PartialEq for Scan {
    fn eq(&self, other: &Self) -> bool {
        *self as u8 == *other as u8
    }
}
|
||||
|
||||
// Whether the operator to the left of `expr` (recorded in
// `fixup.previous_operator`) binds loosely enough that `expr` can extend to
// the left without parentheses.
#[cfg(feature = "full")]
fn scan_left(expr: &Expr, fixup: FixupContext) -> bool {
    let prev = fixup.previous_operator;
    match expr {
        Expr::Assign(_) => prev <= Precedence::Assign,
        Expr::Binary(e) => {
            let binop_prec = Precedence::of_binop(&e.op);
            if binop_prec == Precedence::Assign {
                // Compound assignment operators associate like `=`.
                prev <= Precedence::Assign
            } else {
                prev < binop_prec
            }
        }
        Expr::Cast(_) => prev < Precedence::Cast,
        Expr::Range(e) => e.start.is_none() || prev < Precedence::Assign,
        _ => true,
    }
}
|
||||
|
||||
// Scan ahead through the rightmost spine of `expr` to decide how an
// unparenthesized print would interact with the operator that follows it
// (`fixup.next_operator`), given the precedence of the operator that
// produced this operand.
//
// NOTE(review): overall semantics inferred from the call sites in
// `leftmost_subexpression_precedence` / `rightmost_subexpression_precedence`.
// `fail_offset` / `bailout_offset` appear to count how many enclosing levels
// would already absorb a Fail/Bailout result — confirm against upstream
// commentary before relying on this description.
#[cfg(feature = "full")]
fn scan_right(
    expr: &Expr,
    fixup: FixupContext,
    precedence: Precedence,
    fail_offset: u8,
    bailout_offset: u8,
) -> Scan {
    // Default verdict based purely on precedence comparison with the
    // following operator.
    let consume_by_precedence = if match precedence {
        Precedence::Assign | Precedence::Compare => precedence <= fixup.next_operator,
        _ => precedence < fixup.next_operator,
    } || fixup.next_operator == Precedence::MIN
    {
        Scan::Consume
    } else {
        Scan::Bailout
    };
    // If this expression gets parenthesized anyway, no deeper scan needed.
    if fixup.parenthesize(expr) {
        return consume_by_precedence;
    }
    match expr {
        Expr::Assign(e) if e.attrs.is_empty() => {
            if match fixup.next_operator {
                Precedence::Unambiguous => fail_offset >= 2,
                _ => bailout_offset >= 1,
            } {
                return Scan::Consume;
            }
            let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Assign);
            let scan = scan_right(
                &e.right,
                right_fixup,
                Precedence::Assign,
                match fixup.next_operator {
                    Precedence::Unambiguous => fail_offset,
                    _ => 1,
                },
                1,
            );
            if let Scan::Bailout | Scan::Consume = scan {
                Scan::Consume
            } else if let Precedence::Unambiguous = fixup.next_operator {
                Scan::Fail
            } else {
                Scan::Bailout
            }
        }
        Expr::Binary(e) if e.attrs.is_empty() => {
            if match fixup.next_operator {
                Precedence::Unambiguous => {
                    fail_offset >= 2
                        && (consume_by_precedence == Scan::Consume || bailout_offset >= 1)
                }
                _ => bailout_offset >= 1,
            } {
                return Scan::Consume;
            }
            let binop_prec = Precedence::of_binop(&e.op);
            // Chained comparisons are rejected by the parser outright, so no
            // deeper scan distinguishes anything here.
            if binop_prec == Precedence::Compare && fixup.next_operator == Precedence::Compare {
                return Scan::Consume;
            }
            let right_fixup = fixup.rightmost_subexpression_fixup(false, false, binop_prec);
            let scan = scan_right(
                &e.right,
                right_fixup,
                binop_prec,
                match fixup.next_operator {
                    Precedence::Unambiguous => fail_offset,
                    _ => 1,
                },
                consume_by_precedence as u8 - Scan::Bailout as u8,
            );
            match scan {
                Scan::Fail => {}
                Scan::Bailout => return consume_by_precedence,
                Scan::Consume => return Scan::Consume,
            }
            let right_needs_group = binop_prec != Precedence::Assign
                && right_fixup.rightmost_subexpression_precedence(&e.right) <= binop_prec;
            if right_needs_group {
                consume_by_precedence
            } else if let (Scan::Fail, Precedence::Unambiguous) = (scan, fixup.next_operator) {
                Scan::Fail
            } else {
                Scan::Bailout
            }
        }
        Expr::RawAddr(ExprRawAddr { expr, .. })
        | Expr::Reference(ExprReference { expr, .. })
        | Expr::Unary(ExprUnary { expr, .. }) => {
            if match fixup.next_operator {
                Precedence::Unambiguous => {
                    fail_offset >= 2
                        && (consume_by_precedence == Scan::Consume || bailout_offset >= 1)
                }
                _ => bailout_offset >= 1,
            } {
                return Scan::Consume;
            }
            let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Prefix);
            let scan = scan_right(
                expr,
                right_fixup,
                precedence,
                match fixup.next_operator {
                    Precedence::Unambiguous => fail_offset,
                    _ => 1,
                },
                consume_by_precedence as u8 - Scan::Bailout as u8,
            );
            match scan {
                Scan::Fail => {}
                Scan::Bailout => return consume_by_precedence,
                Scan::Consume => return Scan::Consume,
            }
            if right_fixup.rightmost_subexpression_precedence(expr) < Precedence::Prefix {
                consume_by_precedence
            } else if let (Scan::Fail, Precedence::Unambiguous) = (scan, fixup.next_operator) {
                Scan::Fail
            } else {
                Scan::Bailout
            }
        }
        Expr::Range(e) if e.attrs.is_empty() => match &e.end {
            Some(end) => {
                if fail_offset >= 2 {
                    return Scan::Consume;
                }
                let right_fixup =
                    fixup.rightmost_subexpression_fixup(false, true, Precedence::Range);
                let scan = scan_right(
                    end,
                    right_fixup,
                    Precedence::Range,
                    fail_offset,
                    match fixup.next_operator {
                        Precedence::Assign | Precedence::Range => 0,
                        _ => 1,
                    },
                );
                if match (scan, fixup.next_operator) {
                    (Scan::Fail, _) => false,
                    (Scan::Bailout, Precedence::Assign | Precedence::Range) => false,
                    (Scan::Bailout | Scan::Consume, _) => true,
                } {
                    return Scan::Consume;
                }
                if right_fixup.rightmost_subexpression_precedence(end) <= Precedence::Range {
                    Scan::Consume
                } else {
                    Scan::Fail
                }
            }
            // Open-ended range: whether the next token gets swallowed as the
            // range's end depends on whether it can begin an expression.
            None => {
                if fixup.next_operator_can_begin_expr {
                    Scan::Consume
                } else {
                    Scan::Fail
                }
            }
        },
        Expr::Break(e) => match &e.expr {
            Some(value) => {
                if bailout_offset >= 1 || e.label.is_none() && classify::expr_leading_label(value) {
                    return Scan::Consume;
                }
                let right_fixup = fixup.rightmost_subexpression_fixup(true, true, Precedence::Jump);
                match scan_right(value, right_fixup, Precedence::Jump, 1, 1) {
                    Scan::Fail => Scan::Bailout,
                    Scan::Bailout | Scan::Consume => Scan::Consume,
                }
            }
            None => match fixup.next_operator {
                Precedence::Assign if precedence > Precedence::Assign => Scan::Fail,
                _ => Scan::Consume,
            },
        },
        Expr::Return(ExprReturn { expr, .. }) | Expr::Yield(ExprYield { expr, .. }) => match expr {
            Some(e) => {
                if bailout_offset >= 1 {
                    return Scan::Consume;
                }
                let right_fixup =
                    fixup.rightmost_subexpression_fixup(true, false, Precedence::Jump);
                match scan_right(e, right_fixup, Precedence::Jump, 1, 1) {
                    Scan::Fail => Scan::Bailout,
                    Scan::Bailout | Scan::Consume => Scan::Consume,
                }
            }
            None => match fixup.next_operator {
                Precedence::Assign if precedence > Precedence::Assign => Scan::Fail,
                _ => Scan::Consume,
            },
        },
        Expr::Closure(e) => {
            // Only closures whose body can keep absorbing tokens (no return
            // type, or a plain block body) need the deeper scan.
            if matches!(e.output, ReturnType::Default)
                || matches!(&*e.body, Expr::Block(body) if body.attrs.is_empty() && body.label.is_none())
            {
                if bailout_offset >= 1 {
                    return Scan::Consume;
                }
                let right_fixup =
                    fixup.rightmost_subexpression_fixup(false, false, Precedence::Jump);
                match scan_right(&e.body, right_fixup, Precedence::Jump, 1, 1) {
                    Scan::Fail => Scan::Bailout,
                    Scan::Bailout | Scan::Consume => Scan::Consume,
                }
            } else {
                Scan::Consume
            }
        }
        Expr::Let(e) => {
            if bailout_offset >= 1 {
                return Scan::Consume;
            }
            let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Let);
            let scan = scan_right(
                &e.expr,
                right_fixup,
                Precedence::Let,
                1,
                if fixup.next_operator < Precedence::Let {
                    0
                } else {
                    1
                },
            );
            match scan {
                Scan::Fail | Scan::Bailout if fixup.next_operator < Precedence::Let => {
                    return Scan::Bailout;
                }
                Scan::Consume => return Scan::Consume,
                _ => {}
            }
            if right_fixup.rightmost_subexpression_precedence(&e.expr) < Precedence::Let {
                Scan::Consume
            } else if let Scan::Fail = scan {
                Scan::Bailout
            } else {
                Scan::Consume
            }
        }
        // All remaining kinds have no rightmost subexpression that could
        // swallow the following operator; decide by precedence alone.
        Expr::Array(_)
        | Expr::Assign(_)
        | Expr::Async(_)
        | Expr::Await(_)
        | Expr::Binary(_)
        | Expr::Block(_)
        | Expr::Call(_)
        | Expr::Cast(_)
        | Expr::Const(_)
        | Expr::Continue(_)
        | Expr::Field(_)
        | Expr::ForLoop(_)
        | Expr::Group(_)
        | Expr::If(_)
        | Expr::Index(_)
        | Expr::Infer(_)
        | Expr::Lit(_)
        | Expr::Loop(_)
        | Expr::Macro(_)
        | Expr::Match(_)
        | Expr::MethodCall(_)
        | Expr::Paren(_)
        | Expr::Path(_)
        | Expr::Range(_)
        | Expr::Repeat(_)
        | Expr::Struct(_)
        | Expr::Try(_)
        | Expr::TryBlock(_)
        | Expr::Tuple(_)
        | Expr::Unsafe(_)
        | Expr::Verbatim(_)
        | Expr::While(_) => match fixup.next_operator {
            Precedence::Assign | Precedence::Range if precedence == Precedence::Range => Scan::Fail,
            _ if precedence == Precedence::Let && fixup.next_operator < Precedence::Let => {
                Scan::Fail
            }
            _ => consume_by_precedence,
        },
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,293 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::parse::ParseBuffer;
|
||||
use crate::token;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::Delimiter;
|
||||
|
||||
// Not public API.
//
// Return type of `parse_parens` (used by the `parenthesized!` macro): the
// paren token plus a `ParseBuffer` positioned over the tokens inside the
// parentheses.
#[doc(hidden)]
pub struct Parens<'a> {
    #[doc(hidden)]
    pub token: token::Paren,
    #[doc(hidden)]
    pub content: ParseBuffer<'a>,
}

// Not public API.
//
// Return type of `parse_braces` (used by the `braced!` macro).
#[doc(hidden)]
pub struct Braces<'a> {
    #[doc(hidden)]
    pub token: token::Brace,
    #[doc(hidden)]
    pub content: ParseBuffer<'a>,
}

// Not public API.
//
// Return type of `parse_brackets` (used by the `bracketed!` macro).
#[doc(hidden)]
pub struct Brackets<'a> {
    #[doc(hidden)]
    pub token: token::Bracket,
    #[doc(hidden)]
    pub content: ParseBuffer<'a>,
}

// Not public API.
//
// Return type of `parse_group`: an invisible (None-delimited) group and the
// buffer over its contents. Only needed by the syntax-tree parsers, hence
// the feature gate.
#[cfg(any(feature = "full", feature = "derive"))]
#[doc(hidden)]
pub struct Group<'a> {
    #[doc(hidden)]
    pub token: token::Group,
    #[doc(hidden)]
    pub content: ParseBuffer<'a>,
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_parens<'a>(input: &ParseBuffer<'a>) -> Result<Parens<'a>> {
|
||||
parse_delimited(input, Delimiter::Parenthesis).map(|(span, content)| Parens {
|
||||
token: token::Paren(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_braces<'a>(input: &ParseBuffer<'a>) -> Result<Braces<'a>> {
|
||||
parse_delimited(input, Delimiter::Brace).map(|(span, content)| Braces {
|
||||
token: token::Brace(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_brackets<'a>(input: &ParseBuffer<'a>) -> Result<Brackets<'a>> {
|
||||
parse_delimited(input, Delimiter::Bracket).map(|(span, content)| Brackets {
|
||||
token: token::Bracket(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
// Consume one invisible (None-delimited) group from the front of `input`.
// Unlike the visible delimiters, an invisible group carries a single joined
// span rather than separate open/close spans, hence the `span.join()`.
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn parse_group<'a>(input: &ParseBuffer<'a>) -> Result<Group<'a>> {
    let (span, content) = parse_delimited(input, Delimiter::None)?;
    Ok(Group {
        token: token::Group(span.join()),
        content,
    })
}
|
||||
|
||||
// Shared implementation behind `parse_parens`/`parse_braces`/
// `parse_brackets`/`parse_group`: consume one group with the requested
// `delimiter` from the front of `input`, returning the group's delimiter
// span together with a `ParseBuffer` positioned inside the group.
fn parse_delimited<'a>(
    input: &ParseBuffer<'a>,
    delimiter: Delimiter,
) -> Result<(DelimSpan, ParseBuffer<'a>)> {
    input.step(|cursor| {
        if let Some((content, span, rest)) = cursor.group(delimiter) {
            // The nested buffer's scope ends at the group's closing
            // delimiter; it shares the outer buffer's `unexpected` slot so
            // that leftover tokens inside the group are still reported.
            let scope = span.close();
            let nested = crate::parse::advance_step_cursor(cursor, content);
            let unexpected = crate::parse::get_unexpected(input);
            let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
            Ok(((span, content), rest))
        } else {
            // No group of the requested kind at the cursor: report an error
            // phrased for the specific delimiter the caller asked for.
            let message = match delimiter {
                Delimiter::Parenthesis => "expected parentheses",
                Delimiter::Brace => "expected curly braces",
                Delimiter::Bracket => "expected square brackets",
                Delimiter::None => "expected invisible group",
            };
            Err(cursor.error(message))
        }
    })
}
|
||||
|
||||
/// Parse a set of parentheses and expose their content to subsequent parsers.
///
/// # Example
///
/// ```
/// # use quote::quote;
/// #
/// use syn::{parenthesized, token, Ident, Result, Token, Type};
/// use syn::parse::{Parse, ParseStream};
/// use syn::punctuated::Punctuated;
///
/// // Parse a simplified tuple struct syntax like:
/// //
/// //     struct S(A, B);
/// struct TupleStruct {
///     struct_token: Token![struct],
///     ident: Ident,
///     paren_token: token::Paren,
///     fields: Punctuated<Type, Token![,]>,
///     semi_token: Token![;],
/// }
///
/// impl Parse for TupleStruct {
///     fn parse(input: ParseStream) -> Result<Self> {
///         let content;
///         Ok(TupleStruct {
///             struct_token: input.parse()?,
///             ident: input.parse()?,
///             paren_token: parenthesized!(content in input),
///             fields: content.parse_terminated(Type::parse, Token![,])?,
///             semi_token: input.parse()?,
///         })
///     }
/// }
/// #
/// # fn main() {
/// #     let input = quote! {
/// #         struct S(A, B);
/// #     };
/// #     syn::parse2::<TupleStruct>(input).unwrap();
/// # }
/// ```
#[macro_export]
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
macro_rules! parenthesized {
    ($content:ident in $cursor:expr) => {
        // On success the previously declared-but-uninitialized `$content`
        // binding is assigned the inner buffer and the expression evaluates
        // to the paren token; on failure the enclosing function returns the
        // parse error.
        match $crate::__private::parse_parens(&$cursor) {
            $crate::__private::Ok(parens) => {
                $content = parens.content;
                parens.token
            }
            $crate::__private::Err(error) => {
                return $crate::__private::Err(error);
            }
        }
    };
}
|
||||
|
||||
/// Parse a set of curly braces and expose their content to subsequent parsers.
///
/// # Example
///
/// ```
/// # use quote::quote;
/// #
/// use syn::{braced, token, Ident, Result, Token, Type};
/// use syn::parse::{Parse, ParseStream};
/// use syn::punctuated::Punctuated;
///
/// // Parse a simplified struct syntax like:
/// //
/// //     struct S {
/// //         a: A,
/// //         b: B,
/// //     }
/// struct Struct {
///     struct_token: Token![struct],
///     ident: Ident,
///     brace_token: token::Brace,
///     fields: Punctuated<Field, Token![,]>,
/// }
///
/// struct Field {
///     name: Ident,
///     colon_token: Token![:],
///     ty: Type,
/// }
///
/// impl Parse for Struct {
///     fn parse(input: ParseStream) -> Result<Self> {
///         let content;
///         Ok(Struct {
///             struct_token: input.parse()?,
///             ident: input.parse()?,
///             brace_token: braced!(content in input),
///             fields: content.parse_terminated(Field::parse, Token![,])?,
///         })
///     }
/// }
///
/// impl Parse for Field {
///     fn parse(input: ParseStream) -> Result<Self> {
///         Ok(Field {
///             name: input.parse()?,
///             colon_token: input.parse()?,
///             ty: input.parse()?,
///         })
///     }
/// }
/// #
/// # fn main() {
/// #     let input = quote! {
/// #         struct S {
/// #             a: A,
/// #             b: B,
/// #         }
/// #     };
/// #     syn::parse2::<Struct>(input).unwrap();
/// # }
/// ```
#[macro_export]
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
macro_rules! braced {
    ($content:ident in $cursor:expr) => {
        // Same expansion shape as `parenthesized!`: assign the inner buffer
        // to `$content` and evaluate to the brace token, or early-return the
        // parse error from the enclosing function.
        match $crate::__private::parse_braces(&$cursor) {
            $crate::__private::Ok(braces) => {
                $content = braces.content;
                braces.token
            }
            $crate::__private::Err(error) => {
                return $crate::__private::Err(error);
            }
        }
    };
}
|
||||
|
||||
/// Parse a set of square brackets and expose their content to subsequent
/// parsers.
///
/// # Example
///
/// ```
/// # use quote::quote;
/// #
/// use proc_macro2::TokenStream;
/// use syn::{bracketed, token, Result, Token};
/// use syn::parse::{Parse, ParseStream};
///
/// // Parse an outer attribute like:
/// //
/// //     #[repr(C, packed)]
/// struct OuterAttribute {
///     pound_token: Token![#],
///     bracket_token: token::Bracket,
///     content: TokenStream,
/// }
///
/// impl Parse for OuterAttribute {
///     fn parse(input: ParseStream) -> Result<Self> {
///         let content;
///         Ok(OuterAttribute {
///             pound_token: input.parse()?,
///             bracket_token: bracketed!(content in input),
///             content: content.parse()?,
///         })
///     }
/// }
/// #
/// # fn main() {
/// #     let input = quote! {
/// #         #[repr(C, packed)]
/// #     };
/// #     syn::parse2::<OuterAttribute>(input).unwrap();
/// # }
/// ```
#[macro_export]
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
macro_rules! bracketed {
    ($content:ident in $cursor:expr) => {
        // Same expansion shape as `parenthesized!`: assign the inner buffer
        // to `$content` and evaluate to the bracket token, or early-return
        // the parse error from the enclosing function.
        match $crate::__private::parse_brackets(&$cursor) {
            $crate::__private::Ok(brackets) => {
                $content = brackets.content;
                brackets.token
            }
            $crate::__private::Err(error) => {
                return $crate::__private::Err(error);
            }
        }
    };
}
|
||||
|
|
@ -0,0 +1,110 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::lookahead;
|
||||
|
||||
pub use proc_macro2::Ident;
|
||||
|
||||
#[cfg(feature = "parsing")]
pub_if_not_doc! {
    #[doc(hidden)]
    #[allow(non_snake_case)]
    // Uncallable marker function: `TokenMarker` is uninhabited, so this
    // exists only so that `Ident` can be named as an argument to `peek`
    // (peekable types are function-like values of `fn(TokenMarker) -> T`).
    pub fn Ident(marker: lookahead::TokenMarker) -> Ident {
        match marker {}
    }
}
|
||||
|
||||
// Generates a `From<Token![...]> for Ident` impl that converts a keyword
// token into an `Ident` with the same text and span.
macro_rules! ident_from_token {
    ($token:ident) => {
        impl From<Token![$token]> for Ident {
            fn from(token: Token![$token]) -> Ident {
                Ident::new(stringify!($token), token.span)
            }
        }
    };
}

// Keyword tokens that may be converted into identifiers.
ident_from_token!(self);
ident_from_token!(Self);
ident_from_token!(super);
ident_from_token!(crate);
ident_from_token!(extern);
|
||||
|
||||
// `_` is written out manually rather than via `ident_from_token!` because
// `_` is not accepted by the macro's `$token:ident` matcher.
impl From<Token![_]> for Ident {
    fn from(token: Token![_]) -> Ident {
        Ident::new("_", token.span)
    }
}
|
||||
|
||||
// Validate an identifier body: a leading `_` or ASCII letter followed by any
// mix of `_` and ASCII alphanumerics.
//
// NOTE: despite the name, this copy checks ASCII only rather than the full
// Unicode XID rules (the `unicode-ident` dependency was dropped for the
// kernel). Panics if `symbol` is empty; callers pass non-empty strings.
pub(crate) fn xid_ok(symbol: &str) -> bool {
    let mut iter = symbol.chars();
    let leading = iter.next().unwrap();
    if !(leading == '_' || leading.is_ascii_alphabetic()) {
        return false;
    }
    iter.all(|c| c == '_' || c.is_ascii_alphanumeric())
}
|
||||
|
||||
#[cfg(feature = "parsing")]
mod parsing {
    use crate::buffer::Cursor;
    use crate::error::Result;
    use crate::parse::{Parse, ParseStream};
    use crate::token::Token;
    use proc_macro2::Ident;

    // Whether this token may be parsed/peeked as a plain `Ident`: `_` and
    // reserved keywords are rejected.
    fn accept_as_ident(ident: &Ident) -> bool {
        match ident.to_string().as_str() {
            "_" |
            // Based on https://doc.rust-lang.org/1.65.0/reference/keywords.html
            "abstract" | "as" | "async" | "await" | "become" | "box" | "break" |
            "const" | "continue" | "crate" | "do" | "dyn" | "else" | "enum" |
            "extern" | "false" | "final" | "fn" | "for" | "if" | "impl" | "in" |
            "let" | "loop" | "macro" | "match" | "mod" | "move" | "mut" |
            "override" | "priv" | "pub" | "ref" | "return" | "Self" | "self" |
            "static" | "struct" | "super" | "trait" | "true" | "try" | "type" |
            "typeof" | "unsafe" | "unsized" | "use" | "virtual" | "where" |
            "while" | "yield" => false,
            _ => true,
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for Ident {
        fn parse(input: ParseStream) -> Result<Self> {
            input.step(|cursor| {
                if let Some((ident, rest)) = cursor.ident() {
                    if accept_as_ident(&ident) {
                        Ok((ident, rest))
                    } else {
                        // Keywords produce a tailored message naming the
                        // offending keyword.
                        Err(cursor.error(format_args!(
                            "expected identifier, found keyword `{}`",
                            ident,
                        )))
                    }
                } else {
                    Err(cursor.error("expected identifier"))
                }
            })
        }
    }

    impl Token for Ident {
        // Peeking mirrors parsing: a keyword at the cursor does not count
        // as an `Ident`.
        fn peek(cursor: Cursor) -> bool {
            if let Some((ident, _rest)) = cursor.ident() {
                accept_as_ident(&ident)
            } else {
                false
            }
        }

        fn display() -> &'static str {
            "identifier"
        }
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,158 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::lookahead;
|
||||
use proc_macro2::{Ident, Span};
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt::{self, Display};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
/// A Rust lifetime: `'a`.
///
/// Lifetime names must conform to the following rules:
///
/// - Must start with an apostrophe.
/// - Must not consist of just an apostrophe: `'`.
/// - Character after the apostrophe must be `_` or a Unicode code point with
///   the XID_Start property.
/// - All following characters must be Unicode code points with the XID_Continue
///   property.
// NOTE(review): this copy validates names with `crate::ident::xid_ok`,
// which is ASCII-only, so in practice non-ASCII names are rejected here.
pub struct Lifetime {
    /// Span of the apostrophe character.
    pub apostrophe: Span,
    /// The name following the apostrophe, stored without the `'`.
    pub ident: Ident,
}
|
||||
|
||||
impl Lifetime {
    /// # Panics
    ///
    /// Panics if the lifetime does not conform to the bulleted rules above.
    ///
    /// # Invocation
    ///
    /// ```
    /// # use proc_macro2::Span;
    /// # use syn::Lifetime;
    /// #
    /// # fn f() -> Lifetime {
    /// Lifetime::new("'a", Span::call_site())
    /// # }
    /// ```
    pub fn new(symbol: &str, span: Span) -> Self {
        // Validation happens in three steps, each with its own panic
        // message: leading apostrophe, non-empty name, valid identifier.
        if !symbol.starts_with('\'') {
            panic!(
                "lifetime name must start with apostrophe as in \"'a\", got {:?}",
                symbol
            );
        }

        if symbol == "'" {
            panic!("lifetime name must not be empty");
        }

        if !crate::ident::xid_ok(&symbol[1..]) {
            panic!("{:?} is not a valid lifetime name", symbol);
        }

        Lifetime {
            apostrophe: span,
            // The apostrophe is stripped; only the name becomes the Ident.
            ident: Ident::new(&symbol[1..], span),
        }
    }

    /// Span covering the apostrophe joined with the name, falling back to
    /// the apostrophe's span alone when joining is unavailable.
    pub fn span(&self) -> Span {
        self.apostrophe
            .join(self.ident.span())
            .unwrap_or(self.apostrophe)
    }

    /// Sets the span of both the apostrophe and the identifier.
    pub fn set_span(&mut self, span: Span) {
        self.apostrophe = span;
        self.ident.set_span(span);
    }
}
|
||||
|
||||
impl Display for Lifetime {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // Reconstruct the source form: apostrophe followed by the name.
        "'".fmt(formatter)?;
        self.ident.fmt(formatter)
    }
}

impl Clone for Lifetime {
    fn clone(&self) -> Self {
        Lifetime {
            apostrophe: self.apostrophe,
            ident: self.ident.clone(),
        }
    }
}

// Equality, ordering and hashing below are all based on the name only; the
// spans are deliberately ignored, keeping Eq/Ord/Hash mutually consistent.
impl PartialEq for Lifetime {
    fn eq(&self, other: &Lifetime) -> bool {
        self.ident.eq(&other.ident)
    }
}

impl Eq for Lifetime {}

impl PartialOrd for Lifetime {
    fn partial_cmp(&self, other: &Lifetime) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Lifetime {
    fn cmp(&self, other: &Lifetime) -> Ordering {
        self.ident.cmp(&other.ident)
    }
}

impl Hash for Lifetime {
    fn hash<H: Hasher>(&self, h: &mut H) {
        self.ident.hash(h);
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub_if_not_doc! {
    #[doc(hidden)]
    #[allow(non_snake_case)]
    // Uncallable marker function, same trick as for `Ident`: it lets
    // `Lifetime` be used as a peek argument while `TokenMarker` being
    // uninhabited guarantees it is never actually invoked.
    pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
        match marker {}
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use crate::error::Result;
    use crate::lifetime::Lifetime;
    use crate::parse::{Parse, ParseStream};

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for Lifetime {
        fn parse(input: ParseStream) -> Result<Self> {
            // Delegate lexing entirely to the cursor; only the "not a
            // lifetime" error is produced here.
            input.step(|cursor| {
                cursor
                    .lifetime()
                    .ok_or_else(|| cursor.error("expected lifetime"))
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use crate::lifetime::Lifetime;
    use proc_macro2::{Punct, Spacing, TokenStream};
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Lifetime {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `Spacing::Joint` glues the apostrophe to the following
            // identifier so the pair round-trips as one lifetime token.
            let mut apostrophe = Punct::new('\'', Spacing::Joint);
            apostrophe.set_span(self.apostrophe);
            tokens.append(apostrophe);
            self.ident.to_tokens(tokens);
        }
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,334 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::buffer::Cursor;
|
||||
use crate::error::{self, Error};
|
||||
use crate::sealed::lookahead::Sealed;
|
||||
use crate::span::IntoSpans;
|
||||
use crate::token::{CustomToken, Token};
|
||||
use proc_macro2::{Delimiter, Span};
|
||||
use std::cell::RefCell;
|
||||
|
||||
/// Support for checking the next token in a stream to decide how to parse.
///
/// An important advantage over [`ParseStream::peek`] is that here we
/// automatically construct an appropriate error message based on the token
/// alternatives that get peeked. If you are producing your own error message,
/// go ahead and use `ParseStream::peek` instead.
///
/// Use [`ParseStream::lookahead1`] to construct this object.
///
/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
/// [`ParseStream::lookahead1`]: crate::parse::ParseBuffer::lookahead1
///
/// Consuming tokens from the source stream after constructing a lookahead
/// object does not also advance the lookahead object.
///
/// # Example
///
/// ```
/// use syn::{ConstParam, Ident, Lifetime, LifetimeParam, Result, Token, TypeParam};
/// use syn::parse::{Parse, ParseStream};
///
/// // A generic parameter, a single one of the comma-separated elements inside
/// // angle brackets in:
/// //
/// //     fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... }
/// //
/// // On invalid input, lookahead gives us a reasonable error message.
/// //
/// //     error: expected one of: identifier, lifetime, `const`
/// //       |
/// //     5 | fn f<!Sized>() {}
/// //       |      ^
/// enum GenericParam {
///     Type(TypeParam),
///     Lifetime(LifetimeParam),
///     Const(ConstParam),
/// }
///
/// impl Parse for GenericParam {
///     fn parse(input: ParseStream) -> Result<Self> {
///         let lookahead = input.lookahead1();
///         if lookahead.peek(Ident) {
///             input.parse().map(GenericParam::Type)
///         } else if lookahead.peek(Lifetime) {
///             input.parse().map(GenericParam::Lifetime)
///         } else if lookahead.peek(Token![const]) {
///             input.parse().map(GenericParam::Const)
///         } else {
///             Err(lookahead.error())
///         }
///     }
/// }
/// ```
pub struct Lookahead1<'a> {
    // Span used for "unexpected end of input" errors.
    scope: Span,
    // Position being peeked; never advances.
    cursor: Cursor<'a>,
    // Display strings of every token type peeked so far that did not match,
    // accumulated for the combined error message built by `error`.
    comparisons: RefCell<Vec<&'static str>>,
}
|
||||
|
||||
// Crate-internal constructor (the public entry point is
// `ParseStream::lookahead1`, per the struct docs above). Starts with no
// recorded comparisons; each failed peek adds one.
pub(crate) fn new(scope: Span, cursor: Cursor) -> Lookahead1 {
    Lookahead1 {
        scope,
        cursor,
        comparisons: RefCell::new(Vec::new()),
    }
}
|
||||
|
||||
fn peek_impl(
|
||||
lookahead: &Lookahead1,
|
||||
peek: fn(Cursor) -> bool,
|
||||
display: fn() -> &'static str,
|
||||
) -> bool {
|
||||
if peek(lookahead.cursor) {
|
||||
return true;
|
||||
}
|
||||
lookahead.comparisons.borrow_mut().push(display());
|
||||
false
|
||||
}
|
||||
|
||||
impl<'a> Lookahead1<'a> {
    /// Looks at the next token in the parse stream to determine whether it
    /// matches the requested type of token.
    ///
    /// # Syntax
    ///
    /// Note that this method does not use turbofish syntax. Pass the peek type
    /// inside of parentheses.
    ///
    /// - `input.peek(Token![struct])`
    /// - `input.peek(Token![==])`
    /// - `input.peek(Ident)` *(does not accept keywords)*
    /// - `input.peek(Ident::peek_any)`
    /// - `input.peek(Lifetime)`
    /// - `input.peek(token::Brace)`
    pub fn peek<T: Peek>(&self, token: T) -> bool {
        // The argument is only a type-level selector; the actual work is
        // driven by the associated `Token` type's `peek`/`display`.
        let _ = token;
        peek_impl(self, T::Token::peek, T::Token::display)
    }

    /// Triggers an error at the current position of the parse stream.
    ///
    /// The error message will identify all of the expected token types that
    /// have been peeked against this lookahead instance.
    pub fn error(self) -> Error {
        let mut comparisons = self.comparisons.into_inner();
        comparisons.retain_mut(|display| {
            // "`)`" is the placeholder produced by `End::display`; replace
            // it with the actual closing delimiter of the current scope, or
            // drop the entry inside an invisible (None-delimited) group.
            if *display == "`)`" {
                *display = match self.cursor.scope_delimiter() {
                    Delimiter::Parenthesis => "`)`",
                    Delimiter::Brace => "`}`",
                    Delimiter::Bracket => "`]`",
                    Delimiter::None => return false,
                }
            }
            true
        });
        // Message shape depends on how many alternatives were peeked.
        match comparisons.len() {
            0 => {
                if self.cursor.eof() {
                    Error::new(self.scope, "unexpected end of input")
                } else {
                    Error::new(self.cursor.span(), "unexpected token")
                }
            }
            1 => {
                let message = format!("expected {}", comparisons[0]);
                error::new_at(self.scope, self.cursor, message)
            }
            2 => {
                let message = format!("expected {} or {}", comparisons[0], comparisons[1]);
                error::new_at(self.scope, self.cursor, message)
            }
            _ => {
                let join = comparisons.join(", ");
                let message = format!("expected one of: {}", join);
                error::new_at(self.scope, self.cursor, message)
            }
        }
    }
}
|
||||
|
||||
/// Types that can be parsed by looking at just one token.
///
/// Use [`ParseStream::peek`] to peek one of these types in a parse stream
/// without consuming it from the stream.
///
/// This trait is sealed and cannot be implemented for types outside of Syn.
///
/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
pub trait Peek: Sealed {
    // Not public API.
    //
    // The token type whose `peek`/`display` functions drive the lookahead
    // machinery for this peekable value.
    #[doc(hidden)]
    type Token: Token;
}
|
||||
|
||||
/// Pseudo-token used for peeking the end of a parse stream.
///
/// This type is only useful as an argument to one of the following functions:
///
/// - [`ParseStream::peek`][crate::parse::ParseBuffer::peek]
/// - [`ParseStream::peek2`][crate::parse::ParseBuffer::peek2]
/// - [`ParseStream::peek3`][crate::parse::ParseBuffer::peek3]
/// - [`Lookahead1::peek`]
///
/// The peek will return `true` if there are no remaining tokens after that
/// point in the parse stream.
///
/// # Example
///
/// Suppose we are parsing attributes containing core::fmt inspired formatting
/// arguments:
///
/// - `#[fmt("simple example")]`
/// - `#[fmt("interpolation e{}ample", self.x)]`
/// - `#[fmt("interpolation e{x}ample")]`
///
/// and we want to recognize the cases where no interpolation occurs so that
/// more efficient code can be generated.
///
/// The following implementation uses `input.peek(Token![,]) &&
/// input.peek2(End)` to recognize the case of a trailing comma without
/// consuming the comma from the parse stream, because if it isn't a trailing
/// comma, that same comma needs to be parsed as part of `args`.
///
/// ```
/// use proc_macro2::TokenStream;
/// use quote::quote;
/// use syn::parse::{End, Parse, ParseStream, Result};
/// use syn::{parse_quote, Attribute, LitStr, Token};
///
/// struct FormatArgs {
///     template: LitStr, // "...{}..."
///     args: TokenStream, // , self.x
/// }
///
/// impl Parse for FormatArgs {
///     fn parse(input: ParseStream) -> Result<Self> {
///         let template: LitStr = input.parse()?;
///
///         let args = if input.is_empty()
///             || input.peek(Token![,]) && input.peek2(End)
///         {
///             input.parse::<Option<Token![,]>>()?;
///             TokenStream::new()
///         } else {
///             input.parse()?
///         };
///
///         Ok(FormatArgs {
///             template,
///             args,
///         })
///     }
/// }
///
/// fn main() -> Result<()> {
///     let attrs: Vec<Attribute> = parse_quote! {
///         #[fmt("simple example")]
///         #[fmt("interpolation e{}ample", self.x)]
///         #[fmt("interpolation e{x}ample")]
///     };
///
///     for attr in &attrs {
///         let FormatArgs { template, args } = attr.parse_args()?;
///         let requires_fmt_machinery =
///             !args.is_empty() || template.value().contains(['{', '}']);
///         let out = if requires_fmt_machinery {
///             quote! {
///                 ::core::write!(__formatter, #template #args)
///             }
///         } else {
///             quote! {
///                 __formatter.write_str(#template)
///             }
///         };
///         println!("{}", out);
///     }
///     Ok(())
/// }
/// ```
///
/// Implementing this parsing logic without `peek2(End)` is more clumsy because
/// we'd need a parse stream actually advanced past the comma before being able
/// to find out whether there is anything after it. It would look something
/// like:
///
/// ```
/// # use proc_macro2::TokenStream;
/// # use syn::parse::{ParseStream, Result};
/// # use syn::Token;
/// #
/// # fn parse(input: ParseStream) -> Result<()> {
/// use syn::parse::discouraged::Speculative as _;
///
/// let ahead = input.fork();
/// ahead.parse::<Option<Token![,]>>()?;
/// let args = if ahead.is_empty() {
///     input.advance_to(&ahead);
///     TokenStream::new()
/// } else {
///     input.parse()?
/// };
/// # Ok(())
/// # }
/// ```
///
/// or:
///
/// ```
/// # use proc_macro2::TokenStream;
/// # use syn::parse::{ParseStream, Result};
/// # use syn::Token;
/// #
/// # fn parse(input: ParseStream) -> Result<()> {
/// use quote::ToTokens as _;
///
/// let comma: Option<Token![,]> = input.parse()?;
/// let mut args = TokenStream::new();
/// if !input.is_empty() {
///     comma.to_tokens(&mut args);
///     input.parse::<TokenStream>()?.to_tokens(&mut args);
/// }
/// # Ok(())
/// # }
/// ```
pub struct End;

// `End` is a stateless unit marker; these impls make it trivially copyable.
impl Copy for End {}

impl Clone for End {
    fn clone(&self) -> Self {
        *self
    }
}

// Peeking `End` is driven directly by its own `CustomToken` impl below.
impl Peek for End {
    type Token = Self;
}

impl CustomToken for End {
    // `End` matches exactly when there is nothing left in the scope.
    fn peek(cursor: Cursor) -> bool {
        cursor.eof()
    }

    fn display() -> &'static str {
        "`)`" // Lookahead1 error message will fill in the expected close delimiter
    }
}

// Any copyable function value `fn(TokenMarker) -> T` is peekable as token
// type `T`. This is what allows marker functions such as the uncallable
// `Ident(TokenMarker) -> Ident` to be passed to `peek`.
impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Peek for F {}

// Uninhabited marker type: functions taking `TokenMarker` can be named as
// peek arguments but can never actually be called.
pub enum TokenMarker {}

impl<S> IntoSpans<S> for TokenMarker {
    fn into_spans(self) -> S {
        // Unreachable: `TokenMarker` has no values.
        match self {}
    }
}

impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Sealed for F {}

impl Sealed for End {}
|
||||
|
|
@ -0,0 +1,227 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::error::Result;
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, ParseStream, Parser};
|
||||
use crate::path::Path;
|
||||
use crate::token::{Brace, Bracket, Paren};
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::Delimiter;
|
||||
use proc_macro2::TokenStream;
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::TokenTree;
|
||||
|
||||
ast_struct! {
    /// A macro invocation: `println!("{}", mac)`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Macro {
        // Path of the macro being invoked, e.g. `println`.
        pub path: Path,
        pub bang_token: Token![!],
        // Which bracketing surrounds the body: `(...)`, `{...}` or `[...]`.
        pub delimiter: MacroDelimiter,
        // Raw, unparsed tokens of the macro body.
        pub tokens: TokenStream,
    }
}

ast_enum! {
    /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum MacroDelimiter {
        Paren(Paren),
        Brace(Brace),
        Bracket(Bracket),
    }
}
|
||||
|
||||
impl MacroDelimiter {
|
||||
pub fn span(&self) -> &DelimSpan {
|
||||
match self {
|
||||
MacroDelimiter::Paren(token) => &token.span,
|
||||
MacroDelimiter::Brace(token) => &token.span,
|
||||
MacroDelimiter::Bracket(token) => &token.span,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "full", any(feature = "parsing", feature = "printing")))]
|
||||
pub(crate) fn is_brace(&self) -> bool {
|
||||
match self {
|
||||
MacroDelimiter::Brace(_) => true,
|
||||
MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Macro {
    /// Parse the tokens within the macro invocation's delimiters into a syntax
    /// tree.
    ///
    /// This is equivalent to `syn::parse2::<T>(mac.tokens)` except that it
    /// produces a more useful span when `tokens` is empty.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{parse_quote, Expr, ExprLit, Ident, Lit, LitStr, Macro, Token};
    /// use syn::ext::IdentExt;
    /// use syn::parse::{Error, Parse, ParseStream, Result};
    /// use syn::punctuated::Punctuated;
    ///
    /// // The arguments expected by libcore's format_args macro, and as a
    /// // result most other formatting and printing macros like println.
    /// //
    /// // println!("{} is {number:.prec$}", "x", prec=5, number=0.01)
    /// struct FormatArgs {
    ///     format_string: Expr,
    ///     positional_args: Vec<Expr>,
    ///     named_args: Vec<(Ident, Expr)>,
    /// }
    ///
    /// impl Parse for FormatArgs {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         let format_string: Expr;
    ///         let mut positional_args = Vec::new();
    ///         let mut named_args = Vec::new();
    ///
    ///         format_string = input.parse()?;
    ///         while !input.is_empty() {
    ///             input.parse::<Token![,]>()?;
    ///             if input.is_empty() {
    ///                 break;
    ///             }
    ///             if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
    ///                 while !input.is_empty() {
    ///                     let name: Ident = input.call(Ident::parse_any)?;
    ///                     input.parse::<Token![=]>()?;
    ///                     let value: Expr = input.parse()?;
    ///                     named_args.push((name, value));
    ///                     if input.is_empty() {
    ///                         break;
    ///                     }
    ///                     input.parse::<Token![,]>()?;
    ///                 }
    ///                 break;
    ///             }
    ///             positional_args.push(input.parse()?);
    ///         }
    ///
    ///         Ok(FormatArgs {
    ///             format_string,
    ///             positional_args,
    ///             named_args,
    ///         })
    ///     }
    /// }
    ///
    /// // Extract the first argument, the format string literal, from an
    /// // invocation of a formatting or printing macro.
    /// fn get_format_string(m: &Macro) -> Result<LitStr> {
    ///     let args: FormatArgs = m.parse_body()?;
    ///     match args.format_string {
    ///         Expr::Lit(ExprLit { lit: Lit::Str(lit), .. }) => Ok(lit),
    ///         other => {
    ///             // First argument was not a string literal expression.
    ///             // Maybe something like: println!(concat!(...), ...)
    ///             Err(Error::new_spanned(other, "format string must be a string literal"))
    ///         }
    ///     }
    /// }
    ///
    /// fn main() {
    ///     let invocation = parse_quote! {
    ///         println!("{:?}", Instant::now())
    ///     };
    ///     let lit = get_format_string(&invocation).unwrap();
    ///     assert_eq!(lit.value(), "{:?}");
    /// }
    /// ```
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_body<T: Parse>(&self) -> Result<T> {
        self.parse_body_with(T::parse)
    }

    /// Parse the tokens within the macro invocation's delimiters using the
    /// given parser.
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
        // Scope errors to the closing delimiter's span; per the doc comment on
        // `parse_body`, this yields a more useful span when `tokens` is empty.
        let scope = self.delimiter.span().close();
        crate::parse::parse_scoped(parser, scope, self.tokens.clone())
    }
}
|
||||
|
||||
// Consume the next token tree, which must be a delimited group, and return
// the corresponding `MacroDelimiter` together with the group's token stream.
// An invisible (`Delimiter::None`) group is rejected, as is any non-group
// token.
#[cfg(feature = "parsing")]
pub(crate) fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> {
    input.step(|cursor| {
        if let Some((TokenTree::Group(g), rest)) = cursor.token_tree() {
            // Preserve the group's open/close spans in the delimiter token.
            let span = g.delim_span();
            let delimiter = match g.delimiter() {
                Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
                Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
                Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
                Delimiter::None => {
                    return Err(cursor.error("expected delimiter"));
                }
            };
            Ok(((delimiter, g.stream()), rest))
        } else {
            Err(cursor.error("expected delimiter"))
        }
    })
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use crate::error::Result;
    use crate::mac::{parse_delimiter, Macro};
    use crate::parse::{Parse, ParseStream};
    use crate::path::Path;

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for Macro {
        // Parses `path ! <delimited token stream>`.
        fn parse(input: ParseStream) -> Result<Self> {
            // `parse_delimiter` yields both the delimiter and the body; the
            // body is smuggled out of the struct-literal expression through
            // this local so both fields can be initialized from one call.
            let tokens;
            Ok(Macro {
                // Mod-style path: no generic arguments in path segments.
                path: input.call(Path::parse_mod_style)?,
                bang_token: input.parse()?,
                delimiter: {
                    let (delimiter, content) = parse_delimiter(input)?;
                    tokens = content;
                    delimiter
                },
                tokens,
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use crate::mac::{Macro, MacroDelimiter};
    use crate::path;
    use crate::path::printing::PathStyle;
    use crate::token;
    use proc_macro2::{Delimiter, TokenStream};
    use quote::ToTokens;

    impl MacroDelimiter {
        // Wrap `inner` in this delimiter and append the result to `tokens`,
        // reusing the spans stored in the delimiter token.
        pub(crate) fn surround(&self, tokens: &mut TokenStream, inner: TokenStream) {
            let (delim, span) = match self {
                MacroDelimiter::Paren(paren) => (Delimiter::Parenthesis, paren.span),
                MacroDelimiter::Brace(brace) => (Delimiter::Brace, brace.span),
                MacroDelimiter::Bracket(bracket) => (Delimiter::Bracket, bracket.span),
            };
            token::printing::delim(delim, span.join(), tokens, inner);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Macro {
        // Prints `path ! <delimited body>`; no trailing semicolon is emitted
        // here regardless of delimiter.
        fn to_tokens(&self, tokens: &mut TokenStream) {
            path::printing::print_path(tokens, &self.path, PathStyle::Mod);
            self.bang_token.to_tokens(tokens);
            self.delimiter.surround(tokens, self.tokens.clone());
        }
    }
}
|
||||
|
|
@ -0,0 +1,184 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
// Declares an AST struct. The `#full` marker means the type carries data that
// only exists under the "full" feature; without "full" an unconstructable
// placeholder with the same name is emitted instead.
#[cfg_attr(
    not(any(feature = "full", feature = "derive")),
    allow(unknown_lints, unused_macro_rules)
)]
macro_rules! ast_struct {
    // `#full` arm.
    (
        $(#[$attr:meta])*
        $pub:ident $struct:ident $name:ident #full $body:tt
    ) => {
        // Compile-time check that the caller literally wrote `pub struct`
        // (the idents are matched loosely so attributes can be forwarded).
        check_keyword_matches!(pub $pub);
        check_keyword_matches!(struct $struct);

        #[cfg(feature = "full")]
        $(#[$attr])* $pub $struct $name $body

        // Placeholder: a private PhantomData field makes the type impossible
        // to construct outside this crate when "full" is disabled.
        #[cfg(not(feature = "full"))]
        $(#[$attr])* $pub $struct $name {
            _noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
        }

        // The placeholder can never be instantiated, so printing it is
        // unreachable by construction.
        #[cfg(all(not(feature = "full"), feature = "printing"))]
        impl ::quote::ToTokens for $name {
            fn to_tokens(&self, _: &mut ::proc_macro2::TokenStream) {
                unreachable!()
            }
        }
    };

    // Ordinary arm: emit the struct exactly as written.
    (
        $(#[$attr:meta])*
        $pub:ident $struct:ident $name:ident $body:tt
    ) => {
        check_keyword_matches!(pub $pub);
        check_keyword_matches!(struct $struct);

        $(#[$attr])* $pub $struct $name $body
    };
}
|
||||
|
||||
// Declares an AST enum verbatim, after checking that the caller wrote the
// literal `pub enum` keywords.
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! ast_enum {
    (
        $(#[$enum_attr:meta])*
        $pub:ident $enum:ident $name:ident $body:tt
    ) => {
        check_keyword_matches!(pub $pub);
        check_keyword_matches!(enum $enum);

        $(#[$enum_attr])* $pub $enum $name $body
    };
}
|
||||
|
||||
// Declares an AST enum whose variants each wrap a struct, and additionally
// generates `From<Struct> for Enum` conversions and (under "printing") a
// `ToTokens` impl that dispatches to the variant's payload.
macro_rules! ast_enum_of_structs {
    (
        $(#[$enum_attr:meta])*
        $pub:ident $enum:ident $name:ident $body:tt
    ) => {
        check_keyword_matches!(pub $pub);
        check_keyword_matches!(enum $enum);

        $(#[$enum_attr])* $pub $enum $name $body

        ast_enum_of_structs_impl!($name $body);

        #[cfg(feature = "printing")]
        generate_to_tokens!(() tokens $name $body);
    };
}
|
||||
|
||||
// For each tuple variant `Variant(Member)` of the enum body, emit a
// `From<Member>` conversion. Unit variants (no `($member)`) produce nothing
// because the inner `$( ($member) )*` repetition is empty for them.
macro_rules! ast_enum_of_structs_impl {
    (
        $name:ident {
            $(
                $(#[cfg $cfg_attr:tt])*
                $(#[doc $($doc_attr:tt)*])*
                $variant:ident $( ($member:ident) )*,
            )*
        }
    ) => {
        $($(
            ast_enum_from_struct!($name::$variant, $member);
        )*)*
    };
}
|
||||
|
||||
macro_rules! ast_enum_from_struct {
    // No From<TokenStream> for verbatim variants.
    ($name:ident::Verbatim, $member:ident) => {};

    // Everything else gets a straightforward wrapping conversion.
    ($name:ident::$variant:ident, $member:ident) => {
        impl From<$member> for $name {
            fn from(e: $member) -> $name {
                $name::$variant(e)
            }
        }
    };
}
|
||||
|
||||
// Incrementally builds a `ToTokens` impl for an enum: each recursive step
// peels one variant off the body and appends a match arm to the accumulator
// in the first parenthesized argument; the empty-body base case emits the
// final impl containing all accumulated arms.
#[cfg(feature = "printing")]
macro_rules! generate_to_tokens {
    // Unit variant: contributes an empty match arm (prints nothing).
    (
        ($($arms:tt)*) $tokens:ident $name:ident {
            $(#[cfg $cfg_attr:tt])*
            $(#[doc $($doc_attr:tt)*])*
            $variant:ident,
            $($next:tt)*
        }
    ) => {
        generate_to_tokens!(
            ($($arms)* $(#[cfg $cfg_attr])* $name::$variant => {})
            $tokens $name { $($next)* }
        );
    };

    // Tuple variant: delegates printing to the payload's own ToTokens.
    (
        ($($arms:tt)*) $tokens:ident $name:ident {
            $(#[cfg $cfg_attr:tt])*
            $(#[doc $($doc_attr:tt)*])*
            $variant:ident($member:ident),
            $($next:tt)*
        }
    ) => {
        generate_to_tokens!(
            ($($arms)* $(#[cfg $cfg_attr])* $name::$variant(_e) => _e.to_tokens($tokens),)
            $tokens $name { $($next)* }
        );
    };

    // Base case: all variants consumed; emit the impl.
    (($($arms:tt)*) $tokens:ident $name:ident {}) => {
        #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
        impl ::quote::ToTokens for $name {
            fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) {
                match self {
                    $($arms)*
                }
            }
        }
    };
}
|
||||
|
||||
// Rustdoc bug: does not respect the doc(hidden) on some items.
// Under `doc` builds, downgrade the visibility to `pub(crate)` so the item
// does not appear in rendered documentation.
#[cfg(all(doc, feature = "parsing"))]
macro_rules! pub_if_not_doc {
    ($(#[$m:meta])* $pub:ident $($item:tt)*) => {
        check_keyword_matches!(pub $pub);

        $(#[$m])*
        $pub(crate) $($item)*
    };
}
|
||||
|
||||
// Non-doc builds: keep the item fully `pub` as written. Counterpart of the
// `#[cfg(all(doc, ...))]` definition of this macro above.
#[cfg(all(not(doc), feature = "parsing"))]
macro_rules! pub_if_not_doc {
    ($(#[$m:meta])* $pub:ident $($item:tt)*) => {
        check_keyword_matches!(pub $pub);

        $(#[$m])*
        $pub $($item)*
    };
}
|
||||
|
||||
// Compile-time assertion that an ident forwarded into the `ast_*` macros is
// the expected literal keyword; any other ident fails to match any arm and
// produces a macro expansion error.
macro_rules! check_keyword_matches {
    (enum enum) => {};
    (pub pub) => {};
    (struct struct) => {};
}
|
||||
|
||||
// Declares a function whose documented return type is `impl Trait` but whose
// actually-compiled return type is a concrete type: docs.rs builds see the
// `impl_trait` signature, ordinary builds get `$concrete`.
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! return_impl_trait {
    (
        $(#[$attr:meta])*
        $vis:vis fn $name:ident $args:tt -> $impl_trait:ty [$concrete:ty] $body:block
    ) => {
        #[cfg(not(docsrs))]
        $(#[$attr])*
        $vis fn $name $args -> $concrete $body

        #[cfg(docsrs)]
        $(#[$attr])*
        $vis fn $name $args -> $impl_trait $body
    };
}
|
||||
|
|
@ -0,0 +1,429 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
//! Facility for interpreting structured content inside of an `Attribute`.
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use crate::ext::IdentExt as _;
|
||||
use crate::lit::Lit;
|
||||
use crate::parse::{ParseStream, Parser};
|
||||
use crate::path::{Path, PathSegment};
|
||||
use crate::punctuated::Punctuated;
|
||||
use proc_macro2::Ident;
|
||||
use std::fmt::Display;
|
||||
|
||||
/// Make a parser that is usable with `parse_macro_input!` in a
|
||||
/// `#[proc_macro_attribute]` macro.
|
||||
///
|
||||
/// *Warning:* When parsing attribute args **other than** the
|
||||
/// `proc_macro::TokenStream` input of a `proc_macro_attribute`, you do **not**
|
||||
/// need this function. In several cases your callers will get worse error
|
||||
/// messages if you use this function, because the surrounding delimiter's span
|
||||
/// is concealed from attribute macros by rustc. Use
|
||||
/// [`Attribute::parse_nested_meta`] instead.
|
||||
///
|
||||
/// [`Attribute::parse_nested_meta`]: crate::Attribute::parse_nested_meta
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// This example implements an attribute macro whose invocations look like this:
|
||||
///
|
||||
/// ```
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[tea(kind = "EarlGrey", hot)]
|
||||
/// struct Picard {...}
|
||||
/// # };
|
||||
/// ```
|
||||
///
|
||||
/// The "parameters" supported by the attribute are:
|
||||
///
|
||||
/// - `kind = "..."`
|
||||
/// - `hot`
|
||||
/// - `with(sugar, milk, ...)`, a comma-separated list of ingredients
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::{parse_macro_input, LitStr, Path};
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_attribute]
|
||||
/// # };
|
||||
/// pub fn tea(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
/// let mut kind: Option<LitStr> = None;
|
||||
/// let mut hot: bool = false;
|
||||
/// let mut with: Vec<Path> = Vec::new();
|
||||
/// let tea_parser = syn::meta::parser(|meta| {
|
||||
/// if meta.path.is_ident("kind") {
|
||||
/// kind = Some(meta.value()?.parse()?);
|
||||
/// Ok(())
|
||||
/// } else if meta.path.is_ident("hot") {
|
||||
/// hot = true;
|
||||
/// Ok(())
|
||||
/// } else if meta.path.is_ident("with") {
|
||||
/// meta.parse_nested_meta(|meta| {
|
||||
/// with.push(meta.path);
|
||||
/// Ok(())
|
||||
/// })
|
||||
/// } else {
|
||||
/// Err(meta.error("unsupported tea property"))
|
||||
/// }
|
||||
/// });
|
||||
///
|
||||
/// parse_macro_input!(args with tea_parser);
|
||||
/// eprintln!("kind={kind:?} hot={hot} with={with:?}");
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The `syn::meta` library will take care of dealing with the commas including
|
||||
/// trailing commas, and producing sensible error messages on unexpected input.
|
||||
///
|
||||
/// ```console
|
||||
/// error: expected `,`
|
||||
/// --> src/main.rs:3:37
|
||||
/// |
|
||||
/// 3 | #[tea(kind = "EarlGrey", with(sugar = "lol", milk))]
|
||||
/// | ^
|
||||
/// ```
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Same as above but we factor out most of the logic into a separate function.
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::meta::ParseNestedMeta;
|
||||
/// use syn::parse::{Parser, Result};
|
||||
/// use syn::{parse_macro_input, LitStr, Path};
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_attribute]
|
||||
/// # };
|
||||
/// pub fn tea(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
/// let mut attrs = TeaAttributes::default();
|
||||
/// let tea_parser = syn::meta::parser(|meta| attrs.parse(meta));
|
||||
/// parse_macro_input!(args with tea_parser);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
///
|
||||
/// #[derive(Default)]
|
||||
/// struct TeaAttributes {
|
||||
/// kind: Option<LitStr>,
|
||||
/// hot: bool,
|
||||
/// with: Vec<Path>,
|
||||
/// }
|
||||
///
|
||||
/// impl TeaAttributes {
|
||||
/// fn parse(&mut self, meta: ParseNestedMeta) -> Result<()> {
|
||||
/// if meta.path.is_ident("kind") {
|
||||
/// self.kind = Some(meta.value()?.parse()?);
|
||||
/// Ok(())
|
||||
/// } else /* just like in last example */
|
||||
/// # { unimplemented!() }
|
||||
///
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub fn parser(logic: impl FnMut(ParseNestedMeta) -> Result<()>) -> impl Parser<Output = ()> {
    |input: ParseStream| {
        // An attribute macro may be invoked with no arguments at all; treat
        // an empty stream as trivially successful instead of demanding at
        // least one nested meta item.
        if input.is_empty() {
            Ok(())
        } else {
            parse_nested_meta(input, logic)
        }
    }
}
|
||||
|
||||
/// Context for parsing a single property in the conventional syntax for
|
||||
/// structured attributes.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Refer to usage examples on the following two entry-points:
|
||||
///
|
||||
/// - [`Attribute::parse_nested_meta`] if you have an entire `Attribute` to
|
||||
/// parse. Always use this if possible. Generally this is able to produce
|
||||
/// better error messages because `Attribute` holds span information for all
|
||||
/// of the delimiters therein.
|
||||
///
|
||||
/// - [`syn::meta::parser`] if you are implementing a `proc_macro_attribute`
|
||||
/// macro and parsing the arguments to the attribute macro, i.e. the ones
|
||||
/// written in the same attribute that dispatched the macro invocation. Rustc
|
||||
/// does not pass span information for the surrounding delimiters into the
|
||||
/// attribute macro invocation in this situation, so error messages might be
|
||||
/// less precise.
|
||||
///
|
||||
/// [`Attribute::parse_nested_meta`]: crate::Attribute::parse_nested_meta
|
||||
/// [`syn::meta::parser`]: crate::meta::parser
|
||||
#[non_exhaustive]
pub struct ParseNestedMeta<'a> {
    // The path at the head of the current nested meta item, e.g. `kind` in
    // `#[tea(kind = "EarlGrey")]`.
    pub path: Path,
    // Rest of the input, positioned immediately after `path`; used to parse
    // `= value` or a nested `(...)` list.
    pub input: ParseStream<'a>,
}
|
||||
|
||||
impl<'a> ParseNestedMeta<'a> {
    /// Used when parsing `key = "value"` syntax.
    ///
    /// All it does is advance `meta.input` past the `=` sign in the input. You
    /// could accomplish the same effect by writing
    /// `meta.parse::<Token![=]>()?`, so at most it is a minor convenience to
    /// use `meta.value()?`.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{parse_quote, Attribute, LitStr};
    ///
    /// let attr: Attribute = parse_quote! {
    ///     #[tea(kind = "EarlGrey")]
    /// };
    /// // conceptually:
    /// if attr.path().is_ident("tea") { // this parses the `tea`
    ///     attr.parse_nested_meta(|meta| { // this parses the `(`
    ///         if meta.path.is_ident("kind") { // this parses the `kind`
    ///             let value = meta.value()?; // this parses the `=`
    ///             let s: LitStr = value.parse()?; // this parses `"EarlGrey"`
    ///             if s.value() == "EarlGrey" {
    ///                 // ...
    ///             }
    ///             Ok(())
    ///         } else {
    ///             Err(meta.error("unsupported attribute"))
    ///         }
    ///     })?;
    /// }
    /// # anyhow::Ok(())
    /// ```
    pub fn value(&self) -> Result<ParseStream<'a>> {
        self.input.parse::<Token![=]>()?;
        Ok(self.input)
    }

    /// Used when parsing `list(...)` syntax **if** the content inside the
    /// nested parentheses is also expected to conform to Rust's structured
    /// attribute convention.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{parse_quote, Attribute};
    ///
    /// let attr: Attribute = parse_quote! {
    ///     #[tea(with(sugar, milk))]
    /// };
    ///
    /// if attr.path().is_ident("tea") {
    ///     attr.parse_nested_meta(|meta| {
    ///         if meta.path.is_ident("with") {
    ///             meta.parse_nested_meta(|meta| { // <---
    ///                 if meta.path.is_ident("sugar") {
    ///                     // Here we can go even deeper if needed.
    ///                     Ok(())
    ///                 } else if meta.path.is_ident("milk") {
    ///                     Ok(())
    ///                 } else {
    ///                     Err(meta.error("unsupported ingredient"))
    ///                 }
    ///             })
    ///         } else {
    ///             Err(meta.error("unsupported tea property"))
    ///         }
    ///     })?;
    /// }
    /// # anyhow::Ok(())
    /// ```
    ///
    /// # Counterexample
    ///
    /// If you don't need `parse_nested_meta`'s help in parsing the content
    /// written within the nested parentheses, keep in mind that you can always
    /// just parse it yourself from the exposed ParseStream. Rust syntax permits
    /// arbitrary tokens within those parentheses so for the crazier stuff,
    /// `parse_nested_meta` is not what you want.
    ///
    /// ```
    /// use syn::{parenthesized, parse_quote, Attribute, LitInt};
    ///
    /// let attr: Attribute = parse_quote! {
    ///     #[repr(align(32))]
    /// };
    ///
    /// let mut align: Option<LitInt> = None;
    /// if attr.path().is_ident("repr") {
    ///     attr.parse_nested_meta(|meta| {
    ///         if meta.path.is_ident("align") {
    ///             let content;
    ///             parenthesized!(content in meta.input);
    ///             align = Some(content.parse()?);
    ///             Ok(())
    ///         } else {
    ///             Err(meta.error("unsupported repr"))
    ///         }
    ///     })?;
    /// }
    /// # anyhow::Ok(())
    /// ```
    pub fn parse_nested_meta(
        &self,
        logic: impl FnMut(ParseNestedMeta) -> Result<()>,
    ) -> Result<()> {
        // Enter the parenthesized group, then run the ordinary nested-meta
        // loop on its contents.
        let content;
        parenthesized!(content in self.input);
        parse_nested_meta(&content, logic)
    }

    /// Report that the attribute's content did not conform to expectations.
    ///
    /// The span of the resulting error will cover `meta.path` *and* everything
    /// that has been parsed so far since it.
    ///
    /// There are 2 ways you might call this. First, if `meta.path` is not
    /// something you recognize:
    ///
    /// ```
    /// # use syn::Attribute;
    /// #
    /// # fn example(attr: &Attribute) -> syn::Result<()> {
    /// attr.parse_nested_meta(|meta| {
    ///     if meta.path.is_ident("kind") {
    ///         // ...
    ///         Ok(())
    ///     } else {
    ///         Err(meta.error("unsupported tea property"))
    ///     }
    /// })?;
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// In this case, it behaves exactly like
    /// `syn::Error::new_spanned(&meta.path, "message...")`.
    ///
    /// ```console
    /// error: unsupported tea property
    ///  --> src/main.rs:3:26
    ///   |
    /// 3 | #[tea(kind = "EarlGrey", wat = "foo")]
    ///   |                          ^^^
    /// ```
    ///
    /// More usefully, the second place is if you've already parsed a value but
    /// have decided not to accept the value:
    ///
    /// ```
    /// # use syn::Attribute;
    /// #
    /// # fn example(attr: &Attribute) -> syn::Result<()> {
    /// use syn::Expr;
    ///
    /// attr.parse_nested_meta(|meta| {
    ///     if meta.path.is_ident("kind") {
    ///         let expr: Expr = meta.value()?.parse()?;
    ///         match expr {
    ///             Expr::Lit(expr) => /* ... */
    ///             # unimplemented!(),
    ///             Expr::Path(expr) => /* ... */
    ///             # unimplemented!(),
    ///             Expr::Macro(expr) => /* ... */
    ///             # unimplemented!(),
    ///             _ => Err(meta.error("tea kind must be a string literal, path, or macro")),
    ///         }
    ///     } else /* as above */
    ///     # { unimplemented!() }
    ///
    /// })?;
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// ```console
    /// error: tea kind must be a string literal, path, or macro
    ///  --> src/main.rs:3:7
    ///   |
    /// 3 | #[tea(kind = async { replicator.await })]
    ///   |       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    /// ```
    ///
    /// Often you may want to use `syn::Error::new_spanned` even in this
    /// situation. In the above code, that would be:
    ///
    /// ```
    /// # use syn::{Error, Expr};
    /// #
    /// # fn example(expr: Expr) -> syn::Result<()> {
    /// match expr {
    ///     Expr::Lit(expr) => /* ... */
    ///     # unimplemented!(),
    ///     Expr::Path(expr) => /* ... */
    ///     # unimplemented!(),
    ///     Expr::Macro(expr) => /* ... */
    ///     # unimplemented!(),
    ///     _ => Err(Error::new_spanned(expr, "unsupported expression type for `kind`")),
    /// }
    /// # }
    /// ```
    ///
    /// ```console
    /// error: unsupported expression type for `kind`
    ///  --> src/main.rs:3:14
    ///   |
    /// 3 | #[tea(kind = async { replicator.await })]
    ///   |              ^^^^^^^^^^^^^^^^^^^^^^^^^^
    /// ```
    pub fn error(&self, msg: impl Display) -> Error {
        // Span from the first ident of `path` through the last token parsed
        // so far (the token just before the input's current cursor position).
        let start_span = self.path.segments[0].ident.span();
        let end_span = self.input.cursor().prev_span();
        crate::error::new2(start_span, end_span, msg)
    }
}
|
||||
|
||||
// Drives `logic` over each comma-separated nested meta item in `input`,
// tolerating a trailing comma. `logic` is responsible for consuming whatever
// follows the path of each item (`= value`, `(...)`, or nothing).
pub(crate) fn parse_nested_meta(
    input: ParseStream,
    mut logic: impl FnMut(ParseNestedMeta) -> Result<()>,
) -> Result<()> {
    loop {
        let path = input.call(parse_meta_path)?;
        logic(ParseNestedMeta { path, input })?;
        if input.is_empty() {
            return Ok(());
        }
        input.parse::<Token![,]>()?;
        // Permit a trailing comma before the closing delimiter.
        if input.is_empty() {
            return Ok(());
        }
    }
}
|
||||
|
||||
// Like Path::parse_mod_style, but accepts keywords in the path.
fn parse_meta_path(input: ParseStream) -> Result<Path> {
    Ok(Path {
        leading_colon: input.parse()?,
        segments: {
            let mut segments = Punctuated::new();
            // `peek_any`/`parse_any` accept keywords as path segments, unlike
            // a plain `Ident` parse.
            if input.peek(Ident::peek_any) {
                let ident = Ident::parse_any(input)?;
                segments.push_value(PathSegment::from(ident));
            } else if input.is_empty() {
                return Err(input.error("expected nested attribute"));
            } else if input.peek(Lit) {
                // Distinguish the literal case for a more precise message.
                return Err(input.error("unexpected literal in nested attribute, expected ident"));
            } else {
                return Err(input.error("unexpected token in nested attribute, expected ident"));
            }
            // Remaining `::segment` pairs.
            while input.peek(Token![::]) {
                let punct = input.parse()?;
                segments.push_punct(punct);
                let ident = Ident::parse_any(input)?;
                segments.push_value(PathSegment::from(ident));
            }
            segments
        },
    })
}
|
||||
|
|
@ -0,0 +1,221 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
ast_enum! {
    /// A binary operator: `+`, `+=`, `&`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    #[non_exhaustive]
    pub enum BinOp {
        /// The `+` operator (addition)
        Add(Token![+]),
        /// The `-` operator (subtraction)
        Sub(Token![-]),
        /// The `*` operator (multiplication)
        Mul(Token![*]),
        /// The `/` operator (division)
        Div(Token![/]),
        /// The `%` operator (modulus)
        Rem(Token![%]),
        /// The `&&` operator (logical and)
        And(Token![&&]),
        /// The `||` operator (logical or)
        Or(Token![||]),
        /// The `^` operator (bitwise xor)
        BitXor(Token![^]),
        /// The `&` operator (bitwise and)
        BitAnd(Token![&]),
        /// The `|` operator (bitwise or)
        BitOr(Token![|]),
        /// The `<<` operator (shift left)
        Shl(Token![<<]),
        /// The `>>` operator (shift right)
        Shr(Token![>>]),
        /// The `==` operator (equality)
        Eq(Token![==]),
        /// The `<` operator (less than)
        Lt(Token![<]),
        /// The `<=` operator (less than or equal to)
        Le(Token![<=]),
        /// The `!=` operator (not equal to)
        Ne(Token![!=]),
        /// The `>=` operator (greater than or equal to)
        Ge(Token![>=]),
        /// The `>` operator (greater than)
        Gt(Token![>]),
        /// The `+=` operator
        AddAssign(Token![+=]),
        /// The `-=` operator
        SubAssign(Token![-=]),
        /// The `*=` operator
        MulAssign(Token![*=]),
        /// The `/=` operator
        DivAssign(Token![/=]),
        /// The `%=` operator
        RemAssign(Token![%=]),
        /// The `^=` operator
        BitXorAssign(Token![^=]),
        /// The `&=` operator
        BitAndAssign(Token![&=]),
        /// The `|=` operator
        BitOrAssign(Token![|=]),
        /// The `<<=` operator
        ShlAssign(Token![<<=]),
        /// The `>>=` operator
        ShrAssign(Token![>>=]),
    }
}
|
||||
|
||||
ast_enum! {
    /// A unary operator: `*`, `!`, `-`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    #[non_exhaustive]
    pub enum UnOp {
        /// The `*` operator for dereferencing
        Deref(Token![*]),
        /// The `!` operator for logical inversion
        Not(Token![!]),
        /// The `-` operator for negation
        Neg(Token![-]),
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use crate::error::Result;
    use crate::op::{BinOp, UnOp};
    use crate::parse::{Parse, ParseStream};

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for BinOp {
        // The peek order matters: longer operators must be tried before their
        // prefixes (`<<=` before `<<` before `<`, `+=` before `+`, etc.), so
        // all compound-assignment operators come first, then two-character
        // operators, then single characters.
        fn parse(input: ParseStream) -> Result<Self> {
            if input.peek(Token![+=]) {
                input.parse().map(BinOp::AddAssign)
            } else if input.peek(Token![-=]) {
                input.parse().map(BinOp::SubAssign)
            } else if input.peek(Token![*=]) {
                input.parse().map(BinOp::MulAssign)
            } else if input.peek(Token![/=]) {
                input.parse().map(BinOp::DivAssign)
            } else if input.peek(Token![%=]) {
                input.parse().map(BinOp::RemAssign)
            } else if input.peek(Token![^=]) {
                input.parse().map(BinOp::BitXorAssign)
            } else if input.peek(Token![&=]) {
                input.parse().map(BinOp::BitAndAssign)
            } else if input.peek(Token![|=]) {
                input.parse().map(BinOp::BitOrAssign)
            } else if input.peek(Token![<<=]) {
                input.parse().map(BinOp::ShlAssign)
            } else if input.peek(Token![>>=]) {
                input.parse().map(BinOp::ShrAssign)
            } else if input.peek(Token![&&]) {
                input.parse().map(BinOp::And)
            } else if input.peek(Token![||]) {
                input.parse().map(BinOp::Or)
            } else if input.peek(Token![<<]) {
                input.parse().map(BinOp::Shl)
            } else if input.peek(Token![>>]) {
                input.parse().map(BinOp::Shr)
            } else if input.peek(Token![==]) {
                input.parse().map(BinOp::Eq)
            } else if input.peek(Token![<=]) {
                input.parse().map(BinOp::Le)
            } else if input.peek(Token![!=]) {
                input.parse().map(BinOp::Ne)
            } else if input.peek(Token![>=]) {
                input.parse().map(BinOp::Ge)
            } else if input.peek(Token![+]) {
                input.parse().map(BinOp::Add)
            } else if input.peek(Token![-]) {
                input.parse().map(BinOp::Sub)
            } else if input.peek(Token![*]) {
                input.parse().map(BinOp::Mul)
            } else if input.peek(Token![/]) {
                input.parse().map(BinOp::Div)
            } else if input.peek(Token![%]) {
                input.parse().map(BinOp::Rem)
            } else if input.peek(Token![^]) {
                input.parse().map(BinOp::BitXor)
            } else if input.peek(Token![&]) {
                input.parse().map(BinOp::BitAnd)
            } else if input.peek(Token![|]) {
                input.parse().map(BinOp::BitOr)
            } else if input.peek(Token![<]) {
                input.parse().map(BinOp::Lt)
            } else if input.peek(Token![>]) {
                input.parse().map(BinOp::Gt)
            } else {
                Err(input.error("expected binary operator"))
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for UnOp {
        // Unlike BinOp above, this uses lookahead1 so the error message lists
        // the expected tokens.
        fn parse(input: ParseStream) -> Result<Self> {
            let lookahead = input.lookahead1();
            if lookahead.peek(Token![*]) {
                input.parse().map(UnOp::Deref)
            } else if lookahead.peek(Token![!]) {
                input.parse().map(UnOp::Not)
            } else if lookahead.peek(Token![-]) {
                input.parse().map(UnOp::Neg)
            } else {
                Err(lookahead.error())
            }
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use crate::op::{BinOp, UnOp};
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    // Both impls simply forward to the stored operator token, preserving its
    // span.
    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for BinOp {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            match self {
                BinOp::Add(t) => t.to_tokens(tokens),
                BinOp::Sub(t) => t.to_tokens(tokens),
                BinOp::Mul(t) => t.to_tokens(tokens),
                BinOp::Div(t) => t.to_tokens(tokens),
                BinOp::Rem(t) => t.to_tokens(tokens),
                BinOp::And(t) => t.to_tokens(tokens),
                BinOp::Or(t) => t.to_tokens(tokens),
                BinOp::BitXor(t) => t.to_tokens(tokens),
                BinOp::BitAnd(t) => t.to_tokens(tokens),
                BinOp::BitOr(t) => t.to_tokens(tokens),
                BinOp::Shl(t) => t.to_tokens(tokens),
                BinOp::Shr(t) => t.to_tokens(tokens),
                BinOp::Eq(t) => t.to_tokens(tokens),
                BinOp::Lt(t) => t.to_tokens(tokens),
                BinOp::Le(t) => t.to_tokens(tokens),
                BinOp::Ne(t) => t.to_tokens(tokens),
                BinOp::Ge(t) => t.to_tokens(tokens),
                BinOp::Gt(t) => t.to_tokens(tokens),
                BinOp::AddAssign(t) => t.to_tokens(tokens),
                BinOp::SubAssign(t) => t.to_tokens(tokens),
                BinOp::MulAssign(t) => t.to_tokens(tokens),
                BinOp::DivAssign(t) => t.to_tokens(tokens),
                BinOp::RemAssign(t) => t.to_tokens(tokens),
                BinOp::BitXorAssign(t) => t.to_tokens(tokens),
                BinOp::BitAndAssign(t) => t.to_tokens(tokens),
                BinOp::BitOrAssign(t) => t.to_tokens(tokens),
                BinOp::ShlAssign(t) => t.to_tokens(tokens),
                BinOp::ShrAssign(t) => t.to_tokens(tokens),
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for UnOp {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            match self {
                UnOp::Deref(t) => t.to_tokens(tokens),
                UnOp::Not(t) => t.to_tokens(tokens),
                UnOp::Neg(t) => t.to_tokens(tokens),
            }
        }
    }
}
|
||||
(File diff suppressed because it is too large — load the full diff to view this file.)
|
|
@ -0,0 +1,130 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
/// Parse the input TokenStream of a macro, triggering a compile error if the
|
||||
/// tokens fail to parse.
|
||||
///
|
||||
/// Refer to the [`parse` module] documentation for more details about parsing
|
||||
/// in Syn.
|
||||
///
|
||||
/// [`parse` module]: mod@crate::parse
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Intended usage
|
||||
///
|
||||
/// This macro must be called from a function that returns
|
||||
/// `proc_macro::TokenStream`. Usually this will be your proc macro entry point,
|
||||
/// the function that has the #\[proc_macro\] / #\[proc_macro_derive\] /
|
||||
/// #\[proc_macro_attribute\] attribute.
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::{parse_macro_input, Result};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// struct MyMacroInput {
|
||||
/// /* ... */
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for MyMacroInput {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// /* ... */
|
||||
/// # Ok(MyMacroInput {})
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro]
|
||||
/// # };
|
||||
/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
|
||||
/// let input = parse_macro_input!(tokens as MyMacroInput);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Usage with Parser
|
||||
///
|
||||
/// This macro can also be used with the [`Parser` trait] for types that have
|
||||
/// multiple ways that they can be parsed.
|
||||
///
|
||||
/// [`Parser` trait]: crate::parse::Parser
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// # use proc_macro::TokenStream;
|
||||
/// # use syn::{parse_macro_input, Result};
|
||||
/// # use syn::parse::ParseStream;
|
||||
/// #
|
||||
/// # struct MyMacroInput {}
|
||||
/// #
|
||||
/// impl MyMacroInput {
|
||||
/// fn parse_alternate(input: ParseStream) -> Result<Self> {
|
||||
/// /* ... */
|
||||
/// # Ok(MyMacroInput {})
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro]
|
||||
/// # };
|
||||
/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
|
||||
/// let input = parse_macro_input!(tokens with MyMacroInput::parse_alternate);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Expansion
|
||||
///
|
||||
/// `parse_macro_input!($variable as $Type)` expands to something like:
|
||||
///
|
||||
/// ```no_run
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// # macro_rules! doc_test {
|
||||
/// # ($variable:ident as $Type:ty) => {
|
||||
/// match syn::parse::<$Type>($variable) {
|
||||
/// Ok(syntax_tree) => syntax_tree,
|
||||
/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
|
||||
/// }
|
||||
/// # };
|
||||
/// # }
|
||||
/// #
|
||||
/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
/// # let _ = doc_test!(input as syn::Ident);
|
||||
/// # proc_macro::TokenStream::new()
|
||||
/// # }
|
||||
/// ```
|
||||
#[macro_export]
#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
macro_rules! parse_macro_input {
    // `parse_macro_input!(tokens as Type)` — parse via Type's `Parse` impl.
    ($tokenstream:ident as $ty:ty) => {
        match $crate::parse::<$ty>($tokenstream) {
            $crate::__private::Ok(data) => data,
            $crate::__private::Err(err) => {
                // Early-return a rendered compile error from the caller's
                // proc-macro entry point; the expansion therefore requires the
                // enclosing function to return `proc_macro::TokenStream`.
                return $crate::__private::TokenStream::from(err.to_compile_error());
            }
        }
    };
    // `parse_macro_input!(tokens with path)` — parse with an explicit `Parser`
    // function rather than a `Parse` impl, for types with multiple parse modes.
    ($tokenstream:ident with $parser:path) => {
        match $crate::parse::Parser::parse($parser, $tokenstream) {
            $crate::__private::Ok(data) => data,
            $crate::__private::Err(err) => {
                return $crate::__private::TokenStream::from(err.to_compile_error());
            }
        }
    };
    // Bare `parse_macro_input!(tokens)` — let the target type be inferred.
    ($tokenstream:ident) => {
        $crate::parse_macro_input!($tokenstream as _)
    };
}
|
||||
|
|
@ -0,0 +1,242 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
/// Quasi-quotation macro that accepts input like the [`quote!`] macro but uses
|
||||
/// type inference to figure out a return type for those tokens.
|
||||
///
|
||||
/// [`quote!`]: https://docs.rs/quote/1.0/quote/index.html
|
||||
///
|
||||
/// The return type can be any syntax tree node that implements the [`Parse`]
|
||||
/// trait.
|
||||
///
|
||||
/// [`Parse`]: crate::parse::Parse
|
||||
///
|
||||
/// ```
|
||||
/// use quote::quote;
|
||||
/// use syn::{parse_quote, Stmt};
|
||||
///
|
||||
/// fn main() {
|
||||
/// let name = quote!(v);
|
||||
/// let ty = quote!(u8);
|
||||
///
|
||||
/// let stmt: Stmt = parse_quote! {
|
||||
/// let #name: #ty = Default::default();
|
||||
/// };
|
||||
///
|
||||
/// println!("{:#?}", stmt);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// *This macro is available only if Syn is built with both the `"parsing"` and
|
||||
/// `"printing"` features.*
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// The following helper function adds a bound `T: HeapSize` to every type
|
||||
/// parameter `T` in the input generics.
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{parse_quote, Generics, GenericParam};
|
||||
///
|
||||
/// // Add a bound `T: HeapSize` to every type parameter T.
|
||||
/// fn add_trait_bounds(mut generics: Generics) -> Generics {
|
||||
/// for param in &mut generics.params {
|
||||
/// if let GenericParam::Type(type_param) = param {
|
||||
/// type_param.bounds.push(parse_quote!(HeapSize));
|
||||
/// }
|
||||
/// }
|
||||
/// generics
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// # Special cases
|
||||
///
|
||||
/// This macro can parse the following additional types as a special case even
|
||||
/// though they do not implement the `Parse` trait.
|
||||
///
|
||||
/// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]`
|
||||
/// or inner like `#![...]`
|
||||
/// - [`Vec<Attribute>`] — parses multiple attributes, including mixed kinds in
|
||||
/// any order
|
||||
/// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
|
||||
/// `P` with optional trailing punctuation
|
||||
/// - [`Vec<Arm>`] — parses arms separated by optional commas according to the
|
||||
/// same grammar as the inside of a `match` expression
|
||||
/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
|
||||
/// - [`Pat`], [`Box<Pat>`] — parses the same as
|
||||
/// `Pat::parse_multi_with_leading_vert`
|
||||
/// - [`Field`] — parses a named or unnamed struct field
|
||||
///
|
||||
/// [`Vec<Attribute>`]: Attribute
|
||||
/// [`Vec<Arm>`]: Arm
|
||||
/// [`Vec<Stmt>`]: Block::parse_within
|
||||
/// [`Pat`]: Pat::parse_multi_with_leading_vert
|
||||
/// [`Box<Pat>`]: Pat::parse_multi_with_leading_vert
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the tokens fail to parse as the expected syntax tree type. The
|
||||
/// caller is responsible for ensuring that the input tokens are syntactically
|
||||
/// valid.
|
||||
#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))]
#[macro_export]
macro_rules! parse_quote {
    ($($tt:tt)*) => {
        // Quote the interpolated tokens with `quote!`, then re-parse them into
        // the syntax tree type inferred at the use site. Panics (via
        // `__private::parse_quote`) if the tokens fail to parse.
        $crate::__private::parse_quote($crate::__private::quote::quote!($($tt)*))
    };
}
|
||||
|
||||
/// This macro is [`parse_quote!`] + [`quote_spanned!`][quote::quote_spanned].
|
||||
///
|
||||
/// Please refer to each of their documentation.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use quote::{quote, quote_spanned};
|
||||
/// use syn::spanned::Spanned;
|
||||
/// use syn::{parse_quote_spanned, ReturnType, Signature};
|
||||
///
|
||||
/// // Changes `fn()` to `fn() -> Pin<Box<dyn Future<Output = ()>>>`,
|
||||
/// // and `fn() -> T` to `fn() -> Pin<Box<dyn Future<Output = T>>>`,
|
||||
/// // without introducing any call_site() spans.
|
||||
/// fn make_ret_pinned_future(sig: &mut Signature) {
|
||||
/// let ret = match &sig.output {
|
||||
/// ReturnType::Default => quote_spanned!(sig.paren_token.span=> ()),
|
||||
/// ReturnType::Type(_, ret) => quote!(#ret),
|
||||
/// };
|
||||
/// sig.output = parse_quote_spanned! {ret.span()=>
|
||||
/// -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = #ret>>>
|
||||
/// };
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))]
#[macro_export]
macro_rules! parse_quote_spanned {
    ($span:expr=> $($tt:tt)*) => {
        // Same as `parse_quote!`, but quotes with `quote_spanned!` so every
        // generated token carries the caller-supplied span.
        $crate::__private::parse_quote($crate::__private::quote::quote_spanned!($span=> $($tt)*))
    };
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Can parse any type that implements Parse.
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::parse::{Parse, ParseStream, Parser};
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T {
|
||||
let parser = T::parse;
|
||||
match parser.parse2(token_stream) {
|
||||
Ok(t) => t,
|
||||
Err(err) => panic!("{}", err),
|
||||
}
|
||||
}
|
||||
|
||||
// Parsing interface used by `parse_quote!`. It mirrors `Parse`, but exists as
// a separate trait so that extra types (e.g. `Vec<Attribute>`, `Punctuated`)
// can be parsed by the macro without implementing the public `Parse` trait.
#[doc(hidden)]
pub trait ParseQuote: Sized {
    fn parse(input: ParseStream) -> Result<Self>;
}

// Every `Parse` type is trivially `ParseQuote` by delegation; the special
// cases below override this for types that need a different grammar.
impl<T: Parse> ParseQuote for T {
    fn parse(input: ParseStream) -> Result<Self> {
        <T as Parse>::parse(input)
    }
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Any other types that we want `parse_quote!` to be able to parse.
|
||||
|
||||
use crate::punctuated::Punctuated;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
use crate::{attr, Attribute, Field, FieldMutability, Ident, Type, Visibility};
|
||||
#[cfg(feature = "full")]
|
||||
use crate::{Arm, Block, Pat, Stmt};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Attribute {
    /// Parse a single attribute, accepting either kind: `#![...]` (inner) or
    /// `#[...]` (outer), unlike `Parse for Attribute` which is outer-only.
    fn parse(input: ParseStream) -> Result<Self> {
        // `#` immediately followed by `!` marks an inner attribute.
        let is_inner = input.peek(Token![#]) && input.peek2(Token![!]);
        if is_inner {
            attr::parsing::single_parse_inner(input)
        } else {
            attr::parsing::single_parse_outer(input)
        }
    }
}
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Vec<Attribute> {
    /// Parse attributes until the input is exhausted; inner and outer
    /// attributes may appear in any order.
    fn parse(input: ParseStream) -> Result<Self> {
        let mut attrs = Vec::new();
        loop {
            if input.is_empty() {
                return Ok(attrs);
            }
            // Use the ParseQuote impl so both `#[...]` and `#![...]` parse.
            attrs.push(<Attribute as ParseQuote>::parse(input)?);
        }
    }
}
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Field {
    /// Parse one struct field, either named (`vis name: Type`) or unnamed
    /// (`vis Type`), with optional leading outer attributes.
    fn parse(input: ParseStream) -> Result<Self> {
        let attrs = input.call(Attribute::parse_outer)?;
        let vis: Visibility = input.parse()?;

        // A field is named when an identifier is followed by a single `:`.
        // `Ident ::` must be excluded — that is the start of a path type.
        let is_named = input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]);
        let (ident, colon_token): (Option<Ident>, Option<Token![:]>) = if is_named {
            // Tuple elements evaluate left to right: ident, then colon.
            (Some(input.parse()?), Some(input.parse()?))
        } else {
            (None, None)
        };

        let ty: Type = input.parse()?;

        Ok(Field {
            attrs,
            vis,
            mutability: FieldMutability::None,
            ident,
            colon_token,
            ty,
        })
    }
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ParseQuote for Pat {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Pat::parse_multi_with_leading_vert(input)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ParseQuote for Box<Pat> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
<Pat as ParseQuote>::parse(input).map(Box::new)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Self::parse_terminated(input)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ParseQuote for Vec<Stmt> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Block::parse_within(input)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ParseQuote for Vec<Arm> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Arm::parse_multiple(input)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,957 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::attr::Attribute;
|
||||
use crate::expr::Member;
|
||||
use crate::ident::Ident;
|
||||
use crate::path::{Path, QSelf};
|
||||
use crate::punctuated::Punctuated;
|
||||
use crate::token;
|
||||
use crate::ty::Type;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
pub use crate::expr::{
|
||||
ExprConst as PatConst, ExprLit as PatLit, ExprMacro as PatMacro, ExprPath as PatPath,
|
||||
ExprRange as PatRange,
|
||||
};
|
||||
|
||||
ast_enum_of_structs! {
    /// A pattern in a local binding, function signature, match expression, or
    /// various other places.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    #[non_exhaustive]
    pub enum Pat {
        /// A const block: `const { ... }`.
        Const(PatConst),

        /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
        Ident(PatIdent),

        /// A literal pattern: `0`.
        Lit(PatLit),

        /// A macro in pattern position.
        Macro(PatMacro),

        /// A pattern that matches any one of a set of cases.
        Or(PatOr),

        /// A parenthesized pattern: `(A | B)`.
        Paren(PatParen),

        /// A path pattern like `Color::Red`, optionally qualified with a
        /// self-type.
        ///
        /// Unqualified path patterns can legally refer to variants, structs,
        /// constants or associated constants. Qualified path patterns like
        /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
        /// associated constants.
        Path(PatPath),

        /// A range pattern: `1..=2`.
        Range(PatRange),

        /// A reference pattern: `&mut var`.
        Reference(PatReference),

        /// The dots in a tuple or slice pattern: `[0, 1, ..]`.
        Rest(PatRest),

        /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
        Slice(PatSlice),

        /// A struct or struct variant pattern: `Variant { x, y, .. }`.
        Struct(PatStruct),

        /// A tuple pattern: `(a, b)`.
        Tuple(PatTuple),

        /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
        TupleStruct(PatTupleStruct),

        /// A type ascription pattern: `foo: f64`.
        Type(PatType),

        /// Tokens in pattern position not interpreted by Syn.
        Verbatim(TokenStream),

        /// A pattern that matches any value: `_`.
        Wild(PatWild),

        // For testing exhaustiveness in downstream code, use the following idiom:
        //
        //     match pat {
        //         #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
        //
        //         Pat::Box(pat) => {...}
        //         Pat::Ident(pat) => {...}
        //         ...
        //         Pat::Wild(pat) => {...}
        //
        //         _ => { /* some sane fallback */ }
        //     }
        //
        // This way we fail your tests but don't break your library when adding
        // a variant. You will be notified by a test failure when a variant is
        // added, so that you can add code to handle it, but your library will
        // continue to compile and work for downstream users in the interim.
    }
}
|
||||
|
||||
ast_struct! {
    /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
    ///
    /// It may also be a unit struct or struct variant (e.g. `None`), or a
    /// constant; these cannot be distinguished syntactically.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatIdent {
        pub attrs: Vec<Attribute>,
        pub by_ref: Option<Token![ref]>,
        pub mutability: Option<Token![mut]>,
        pub ident: Ident,
        // The `@ SUBPATTERN` part, if any: the `@` token plus the subpattern.
        pub subpat: Option<(Token![@], Box<Pat>)>,
    }
}

ast_struct! {
    /// A pattern that matches any one of a set of cases.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatOr {
        pub attrs: Vec<Attribute>,
        // Optional `|` before the first case, as in `| A | B`.
        pub leading_vert: Option<Token![|]>,
        pub cases: Punctuated<Pat, Token![|]>,
    }
}

ast_struct! {
    /// A parenthesized pattern: `(A | B)`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatParen {
        pub attrs: Vec<Attribute>,
        pub paren_token: token::Paren,
        pub pat: Box<Pat>,
    }
}

ast_struct! {
    /// A reference pattern: `&mut var`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatReference {
        pub attrs: Vec<Attribute>,
        pub and_token: Token![&],
        pub mutability: Option<Token![mut]>,
        pub pat: Box<Pat>,
    }
}

ast_struct! {
    /// The dots in a tuple or slice pattern: `[0, 1, ..]`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatRest {
        pub attrs: Vec<Attribute>,
        pub dot2_token: Token![..],
    }
}

ast_struct! {
    /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatSlice {
        pub attrs: Vec<Attribute>,
        pub bracket_token: token::Bracket,
        pub elems: Punctuated<Pat, Token![,]>,
    }
}
||||
|
||||
ast_struct! {
    /// A struct or struct variant pattern: `Variant { x, y, .. }`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatStruct {
        pub attrs: Vec<Attribute>,
        // Qualified self-type, as in `<A as Trait>::Variant { .. }`.
        pub qself: Option<QSelf>,
        pub path: Path,
        pub brace_token: token::Brace,
        pub fields: Punctuated<FieldPat, Token![,]>,
        // The trailing `..`, if present, matching the remaining fields.
        pub rest: Option<PatRest>,
    }
}

ast_struct! {
    /// A tuple pattern: `(a, b)`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatTuple {
        pub attrs: Vec<Attribute>,
        pub paren_token: token::Paren,
        pub elems: Punctuated<Pat, Token![,]>,
    }
}

ast_struct! {
    /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatTupleStruct {
        pub attrs: Vec<Attribute>,
        pub qself: Option<QSelf>,
        pub path: Path,
        pub paren_token: token::Paren,
        pub elems: Punctuated<Pat, Token![,]>,
    }
}

ast_struct! {
    /// A type ascription pattern: `foo: f64`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatType {
        pub attrs: Vec<Attribute>,
        pub pat: Box<Pat>,
        pub colon_token: Token![:],
        pub ty: Box<Type>,
    }
}

ast_struct! {
    /// A pattern that matches any value: `_`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct PatWild {
        pub attrs: Vec<Attribute>,
        pub underscore_token: Token![_],
    }
}

ast_struct! {
    /// A single field in a struct pattern.
    ///
    /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
    /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct FieldPat {
        pub attrs: Vec<Attribute>,
        pub member: Member,
        // None for shorthand fields like `x` (as opposed to `x: pat`).
        pub colon_token: Option<Token![:]>,
        pub pat: Box<Pat>,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub(crate) mod parsing {
|
||||
use crate::attr::Attribute;
|
||||
use crate::error::{self, Result};
|
||||
use crate::expr::{
|
||||
Expr, ExprConst, ExprLit, ExprMacro, ExprPath, ExprRange, Member, RangeLimits,
|
||||
};
|
||||
use crate::ext::IdentExt as _;
|
||||
use crate::ident::Ident;
|
||||
use crate::lit::Lit;
|
||||
use crate::mac::{self, Macro};
|
||||
use crate::parse::{Parse, ParseBuffer, ParseStream};
|
||||
use crate::pat::{
|
||||
FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct,
|
||||
PatTuple, PatTupleStruct, PatType, PatWild,
|
||||
};
|
||||
use crate::path::{self, Path, QSelf};
|
||||
use crate::punctuated::Punctuated;
|
||||
use crate::stmt::Block;
|
||||
use crate::token;
|
||||
use crate::verbatim;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Pat {
        /// Parse a pattern that does _not_ involve `|` at the top level.
        ///
        /// This parser matches the behavior of the `$:pat_param` macro_rules
        /// matcher, and on editions prior to Rust 2021, the behavior of
        /// `$:pat`.
        ///
        /// In Rust syntax, some examples of where this syntax would occur are
        /// in the argument pattern of functions and closures. Patterns using
        /// `|` are not allowed to occur in these positions.
        ///
        /// ```compile_fail
        /// fn f(Some(_) | None: Option<T>) {
        ///     let _ = |Some(_) | None: Option<T>| {};
        ///     //       ^^^^^^^^^^^^^^^^^^^^^^^^^??? :(
        /// }
        /// ```
        ///
        /// ```console
        /// error: top-level or-patterns are not allowed in function parameters
        ///  --> src/main.rs:1:6
        ///   |
        /// 1 | fn f(Some(_) | None: Option<T>) {
        ///   |      ^^^^^^^^^^^^^^ help: wrap the pattern in parentheses: `(Some(_) | None)`
        /// ```
        pub fn parse_single(input: ParseStream) -> Result<Self> {
            // Fork before consuming anything so `box`-patterns can be captured
            // verbatim (from `begin` to the current cursor) in pat_box.
            let begin = input.fork();
            let lookahead = input.lookahead1();
            // An identifier followed by `::`, `!`, `{`, `(` or `..` starts a
            // path / macro / struct / tuple-struct / range pattern. Keyword
            // path heads (`self::`, `Self`, `super`, `crate`, leading `::`,
            // `<`) go the same way.
            if lookahead.peek(Ident)
                && (input.peek2(Token![::])
                    || input.peek2(Token![!])
                    || input.peek2(token::Brace)
                    || input.peek2(token::Paren)
                    || input.peek2(Token![..]))
                || input.peek(Token![self]) && input.peek2(Token![::])
                || lookahead.peek(Token![::])
                || lookahead.peek(Token![<])
                || input.peek(Token![Self])
                || input.peek(Token![super])
                || input.peek(Token![crate])
            {
                pat_path_or_macro_or_struct_or_range(input)
            } else if lookahead.peek(Token![_]) {
                input.call(pat_wild).map(Pat::Wild)
            } else if input.peek(Token![box]) {
                // Obsolete `box` patterns are preserved as Pat::Verbatim.
                pat_box(begin, input)
            } else if input.peek(Token![-]) || lookahead.peek(Lit) || lookahead.peek(Token![const])
            {
                pat_lit_or_range(input)
            } else if lookahead.peek(Token![ref])
                || lookahead.peek(Token![mut])
                || input.peek(Token![self])
                || input.peek(Ident)
            {
                // A bare (possibly `ref`/`mut`) binding, optionally `@ subpat`.
                input.call(pat_ident).map(Pat::Ident)
            } else if lookahead.peek(Token![&]) {
                input.call(pat_reference).map(Pat::Reference)
            } else if lookahead.peek(token::Paren) {
                input.call(pat_paren_or_tuple)
            } else if lookahead.peek(token::Bracket) {
                input.call(pat_slice).map(Pat::Slice)
            } else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
                // A range pattern with no lower bound, e.g. `..=hi`.
                pat_range_half_open(input)
            } else if lookahead.peek(Token![const]) {
                input.call(pat_const).map(Pat::Verbatim)
            } else {
                Err(lookahead.error())
            }
        }

        /// Parse a pattern, possibly involving `|`, but not a leading `|`.
        pub fn parse_multi(input: ParseStream) -> Result<Self> {
            multi_pat_impl(input, None)
        }

        /// Parse a pattern, possibly involving `|`, possibly including a
        /// leading `|`.
        ///
        /// This parser matches the behavior of the Rust 2021 edition's `$:pat`
        /// macro_rules matcher.
        ///
        /// In Rust syntax, an example of where this syntax would occur is in
        /// the pattern of a `match` arm, where the language permits an optional
        /// leading `|`, although it is not idiomatic to write one there in
        /// handwritten code.
        ///
        /// ```
        /// # let wat = None;
        /// match wat {
        ///     | None | Some(false) => {}
        ///     | Some(true) => {}
        /// }
        /// ```
        ///
        /// The compiler accepts it only to facilitate some situations in
        /// macro-generated code where a macro author might need to write:
        ///
        /// ```
        /// # macro_rules! doc {
        /// #     ($value:expr, ($($conditions1:pat),*), ($($conditions2:pat),*), $then:expr) => {
        /// match $value {
        ///     $(| $conditions1)* $(| $conditions2)* => $then
        /// }
        /// #     };
        /// # }
        /// #
        /// # doc!(true, (true), (false), {});
        /// # doc!(true, (), (true, false), {});
        /// # doc!(true, (true, false), (), {});
        /// ```
        ///
        /// Expressing the same thing correctly in the case that either one (but
        /// not both) of `$conditions1` and `$conditions2` might be empty,
        /// without leading `|`, is complex.
        ///
        /// Use [`Pat::parse_multi`] instead if you are not intending to support
        /// macro-generated macro input.
        pub fn parse_multi_with_leading_vert(input: ParseStream) -> Result<Self> {
            let leading_vert: Option<Token![|]> = input.parse()?;
            multi_pat_impl(input, leading_vert)
        }
    }
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for PatType {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(PatType {
|
||||
attrs: Vec::new(),
|
||||
pat: Box::new(Pat::parse_single(input)?),
|
||||
colon_token: input.parse()?,
|
||||
ty: input.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
    // Shared body of Pat::parse_multi and Pat::parse_multi_with_leading_vert:
    // parse one or more `|`-separated cases, wrapping them in Pat::Or only if
    // there was a leading `|` or at least one separator.
    fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
        let mut pat = Pat::parse_single(input)?;
        // `||` and `|=` are single operator tokens, not `|` case separators,
        // so they must be excluded when peeking.
        if leading_vert.is_some()
            || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
        {
            let mut cases = Punctuated::new();
            cases.push_value(pat);
            while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
                let punct = input.parse()?;
                cases.push_punct(punct);
                let pat = Pat::parse_single(input)?;
                cases.push_value(pat);
            }
            pat = Pat::Or(PatOr {
                attrs: Vec::new(),
                leading_vert,
                cases,
            });
        }
        Ok(pat)
    }
|
||||
|
||||
    // Disambiguate the patterns that begin with a path: macro invocation,
    // struct pattern, tuple-struct pattern, range pattern, or a plain path.
    fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
        let expr_style = true;
        let (qself, path) = path::parsing::qpath(input, expr_style)?;

        // `path!(...)` is a macro in pattern position. Only a plain
        // module-style path (no generics) can be a macro name; `!=` must be
        // excluded so a comparison is not misread as a bang; and a qualified
        // self (`<T>::...`) can never start a macro invocation.
        if qself.is_none()
            && input.peek(Token![!])
            && !input.peek(Token![!=])
            && path.is_mod_style()
        {
            let bang_token: Token![!] = input.parse()?;
            let (delimiter, tokens) = mac::parse_delimiter(input)?;
            return Ok(Pat::Macro(ExprMacro {
                attrs: Vec::new(),
                mac: Macro {
                    path,
                    bang_token,
                    delimiter,
                    tokens,
                },
            }));
        }

        // Otherwise the token following the path decides the pattern form.
        if input.peek(token::Brace) {
            pat_struct(input, qself, path).map(Pat::Struct)
        } else if input.peek(token::Paren) {
            pat_tuple_struct(input, qself, path).map(Pat::TupleStruct)
        } else if input.peek(Token![..]) {
            pat_range(input, qself, path)
        } else {
            Ok(Pat::Path(ExprPath {
                attrs: Vec::new(),
                qself,
                path,
            }))
        }
    }
|
||||
|
||||
fn pat_wild(input: ParseStream) -> Result<PatWild> {
|
||||
Ok(PatWild {
|
||||
attrs: Vec::new(),
|
||||
underscore_token: input.parse()?,
|
||||
})
|
||||
}
|
||||
|
||||
fn pat_box(begin: ParseBuffer, input: ParseStream) -> Result<Pat> {
|
||||
input.parse::<Token![box]>()?;
|
||||
Pat::parse_single(input)?;
|
||||
Ok(Pat::Verbatim(verbatim::between(&begin, input)))
|
||||
}
|
||||
|
||||
fn pat_ident(input: ParseStream) -> Result<PatIdent> {
|
||||
Ok(PatIdent {
|
||||
attrs: Vec::new(),
|
||||
by_ref: input.parse()?,
|
||||
mutability: input.parse()?,
|
||||
ident: {
|
||||
if input.peek(Token![self]) {
|
||||
input.call(Ident::parse_any)?
|
||||
} else {
|
||||
input.parse()?
|
||||
}
|
||||
},
|
||||
subpat: {
|
||||
if input.peek(Token![@]) {
|
||||
let at_token: Token![@] = input.parse()?;
|
||||
let subpat = Pat::parse_single(input)?;
|
||||
Some((at_token, Box::new(subpat)))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
    // Parse the parenthesized elements of a tuple struct / tuple variant
    // pattern, after the path has already been consumed by the caller.
    fn pat_tuple_struct(
        input: ParseStream,
        qself: Option<QSelf>,
        path: Path,
    ) -> Result<PatTupleStruct> {
        let content;
        let paren_token = parenthesized!(content in input);

        // Comma-separated elements; each element may be an or-pattern with an
        // optional leading `|`. A trailing comma is permitted.
        let mut elems = Punctuated::new();
        while !content.is_empty() {
            let value = Pat::parse_multi_with_leading_vert(&content)?;
            elems.push_value(value);
            if content.is_empty() {
                break;
            }
            let punct = content.parse()?;
            elems.push_punct(punct);
        }

        Ok(PatTupleStruct {
            attrs: Vec::new(),
            qself,
            path,
            paren_token,
            elems,
        })
    }
|
||||
|
||||
    // Parse the braced field list of a struct pattern, after the path has
    // already been consumed by the caller.
    fn pat_struct(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatStruct> {
        let content;
        let brace_token = braced!(content in input);

        let mut fields = Punctuated::new();
        let mut rest = None;
        while !content.is_empty() {
            let attrs = content.call(Attribute::parse_outer)?;
            // `..` terminates the field list; any attributes just parsed
            // belong to the rest pattern, not to a field.
            if content.peek(Token![..]) {
                rest = Some(PatRest {
                    attrs,
                    dot2_token: content.parse()?,
                });
                break;
            }
            let mut value = content.call(field_pat)?;
            value.attrs = attrs;
            fields.push_value(value);
            if content.is_empty() {
                break;
            }
            let punct: Token![,] = content.parse()?;
            fields.push_punct(punct);
        }

        Ok(PatStruct {
            attrs: Vec::new(),
            qself,
            path,
            brace_token,
            fields,
            rest,
        })
    }
|
||||
|
||||
// Parse one field of a struct pattern: either the explicit `member: PAT`
// form or the shorthand `ref mut ident` form, which is desugared into a
// named member bound by an ident pattern.
fn field_pat(input: ParseStream) -> Result<FieldPat> {
    // Fork before any modifiers so the deprecated `box` form can be
    // captured verbatim below.
    let begin = input.fork();
    let boxed: Option<Token![box]> = input.parse()?;
    let by_ref: Option<Token![ref]> = input.parse()?;
    let mutability: Option<Token![mut]> = input.parse()?;

    // Modifiers are only legal on the shorthand form, which requires a
    // named member; without modifiers the member may also be a tuple index.
    let member = if boxed.is_some() || by_ref.is_some() || mutability.is_some() {
        input.parse().map(Member::Named)
    } else {
        input.parse()
    }?;

    // Explicit `member: PAT` form. Unnamed members (tuple indices) always
    // require the colon form.
    if boxed.is_none() && by_ref.is_none() && mutability.is_none() && input.peek(Token![:])
        || !member.is_named()
    {
        return Ok(FieldPat {
            attrs: Vec::new(),
            member,
            colon_token: Some(input.parse()?),
            pat: Box::new(Pat::parse_multi_with_leading_vert(input)?),
        });
    }

    let ident = match member {
        Member::Named(ident) => ident,
        // The branch above returned for every unnamed member.
        Member::Unnamed(_) => unreachable!(),
    };

    let pat = if boxed.is_some() {
        // Obsolete `box ident` shorthand: keep the original tokens verbatim.
        Pat::Verbatim(verbatim::between(&begin, input))
    } else {
        // Shorthand desugars to binding the field to a same-named variable.
        Pat::Ident(PatIdent {
            attrs: Vec::new(),
            by_ref,
            mutability,
            ident: ident.clone(),
            subpat: None,
        })
    };

    Ok(FieldPat {
        attrs: Vec::new(),
        member: Member::Named(ident),
        colon_token: None,
        pat: Box::new(pat),
    })
}
|
||||
|
||||
// Parse the tail of a range pattern whose lower bound is a path that the
// caller has already parsed, e.g. the `..=MAX` in `MIN..=MAX`.
fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<Pat> {
    let limits = RangeLimits::parse_obsolete(input)?;
    let end = input.call(pat_range_bound)?;
    // `lo..=` with nothing after the `..=` is rejected; only half-open
    // ranges may omit the upper bound.
    if let (RangeLimits::Closed(_), None) = (&limits, &end) {
        return Err(input.error("expected range upper bound"));
    }
    Ok(Pat::Range(ExprRange {
        attrs: Vec::new(),
        start: Some(Box::new(Expr::Path(ExprPath {
            attrs: Vec::new(),
            qself,
            path,
        }))),
        limits,
        end: end.map(PatRangeBound::into_expr),
    }))
}
|
||||
|
||||
// Parse a pattern that begins with `..` or `..=`: either a range with no
// lower bound (`..hi`, `..=hi`) or, for a bare `..`, a rest pattern.
fn pat_range_half_open(input: ParseStream) -> Result<Pat> {
    let limits: RangeLimits = input.parse()?;
    let end = input.call(pat_range_bound)?;
    if end.is_some() {
        Ok(Pat::Range(ExprRange {
            attrs: Vec::new(),
            start: None,
            limits,
            end: end.map(PatRangeBound::into_expr),
        }))
    } else {
        match limits {
            // A lone `..` is the rest pattern.
            RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
                attrs: Vec::new(),
                dot2_token,
            })),
            // A lone `..=` is never valid: a closed range needs an upper bound.
            RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
        }
    }
}
|
||||
|
||||
// Parse a parenthesized pattern, disambiguating between a parenthesized
// single pattern `(PAT)` and a tuple pattern `(PAT,)` / `(A, B)` / `(..)`.
fn pat_paren_or_tuple(input: ParseStream) -> Result<Pat> {
    let content;
    let paren_token = parenthesized!(content in input);

    let mut elems = Punctuated::new();
    while !content.is_empty() {
        let value = Pat::parse_multi_with_leading_vert(&content)?;
        if content.is_empty() {
            // Exactly one element and no trailing comma: that is a
            // parenthesized pattern — except `(..)`, which is a tuple
            // pattern even without a comma.
            if elems.is_empty() && !matches!(value, Pat::Rest(_)) {
                return Ok(Pat::Paren(PatParen {
                    attrs: Vec::new(),
                    paren_token,
                    pat: Box::new(value),
                }));
            }
            elems.push_value(value);
            break;
        }
        elems.push_value(value);
        let punct = content.parse()?;
        elems.push_punct(punct);
    }

    Ok(Pat::Tuple(PatTuple {
        attrs: Vec::new(),
        paren_token,
        elems,
    }))
}
|
||||
|
||||
fn pat_reference(input: ParseStream) -> Result<PatReference> {
|
||||
Ok(PatReference {
|
||||
attrs: Vec::new(),
|
||||
and_token: input.parse()?,
|
||||
mutability: input.parse()?,
|
||||
pat: Box::new(Pat::parse_single(input)?),
|
||||
})
|
||||
}
|
||||
|
||||
// Parse a pattern that begins with a literal/const/path bound: either a
// standalone literal-like pattern or the lower bound of a range pattern.
fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
    // The caller has already peeked a valid bound, so `pat_range_bound`
    // cannot return `None` here.
    let start = input.call(pat_range_bound)?.unwrap();
    if input.peek(Token![..]) {
        let limits = RangeLimits::parse_obsolete(input)?;
        let end = input.call(pat_range_bound)?;
        // A closed range (`..=`) must have an upper bound.
        if let (RangeLimits::Closed(_), None) = (&limits, &end) {
            return Err(input.error("expected range upper bound"));
        }
        Ok(Pat::Range(ExprRange {
            attrs: Vec::new(),
            start: Some(start.into_expr()),
            limits,
            end: end.map(PatRangeBound::into_expr),
        }))
    } else {
        // No `..` follows: the bound itself is the whole pattern.
        Ok(start.into_pat())
    }
}
|
||||
|
||||
// Patterns that can appear on either side of a range pattern: a const
// block, a literal, or a (possibly qualified) path.
enum PatRangeBound {
    Const(ExprConst),
    Lit(ExprLit),
    Path(ExprPath),
}
|
||||
|
||||
impl PatRangeBound {
    // Convert the bound into the expression form used by `ExprRange`
    // start/end fields.
    fn into_expr(self) -> Box<Expr> {
        Box::new(match self {
            PatRangeBound::Const(pat) => Expr::Const(pat),
            PatRangeBound::Lit(pat) => Expr::Lit(pat),
            PatRangeBound::Path(pat) => Expr::Path(pat),
        })
    }

    // Convert the bound into a standalone pattern, for when no `..`/`..=`
    // follows it.
    fn into_pat(self) -> Pat {
        match self {
            PatRangeBound::Const(pat) => Pat::Const(pat),
            PatRangeBound::Lit(pat) => Pat::Lit(pat),
            PatRangeBound::Path(pat) => Pat::Path(pat),
        }
    }
}
|
||||
|
||||
// Parse an optional range bound. Returns `Ok(None)` when the next token
// clearly terminates the pattern position (end of input, `|`, `=`, a
// non-path `:`, `,`, `;`, or a guard's `if`), so `lo..` style half-open
// ranges parse cleanly.
fn pat_range_bound(input: ParseStream) -> Result<Option<PatRangeBound>> {
    if input.is_empty()
        || input.peek(Token![|])
        || input.peek(Token![=])
        || input.peek(Token![:]) && !input.peek(Token![::])
        || input.peek(Token![,])
        || input.peek(Token![;])
        || input.peek(Token![if])
    {
        return Ok(None);
    }

    // Use a lookahead so the error message lists every accepted token.
    let lookahead = input.lookahead1();
    let expr = if lookahead.peek(Lit) {
        PatRangeBound::Lit(input.parse()?)
    } else if lookahead.peek(Ident)
        || lookahead.peek(Token![::])
        || lookahead.peek(Token![<])
        || lookahead.peek(Token![self])
        || lookahead.peek(Token![Self])
        || lookahead.peek(Token![super])
        || lookahead.peek(Token![crate])
    {
        PatRangeBound::Path(input.parse()?)
    } else if lookahead.peek(Token![const]) {
        PatRangeBound::Const(input.parse()?)
    } else {
        return Err(lookahead.error());
    };

    Ok(Some(expr))
}
|
||||
|
||||
// Parse a slice pattern such as `[a, b, rest @ ..]`.
fn pat_slice(input: ParseStream) -> Result<PatSlice> {
    let content;
    let bracket_token = bracketed!(content in input);

    let mut elems = Punctuated::new();
    while !content.is_empty() {
        let value = Pat::parse_multi_with_leading_vert(&content)?;
        match value {
            // Rustc rejects unparenthesized half-open ranges directly
            // inside a slice pattern; report the same error, spanned over
            // the `..` / `..=` token.
            Pat::Range(pat) if pat.start.is_none() || pat.end.is_none() => {
                let (start, end) = match pat.limits {
                    RangeLimits::HalfOpen(dot_dot) => (dot_dot.spans[0], dot_dot.spans[1]),
                    RangeLimits::Closed(dot_dot_eq) => {
                        (dot_dot_eq.spans[0], dot_dot_eq.spans[2])
                    }
                };
                let msg = "range pattern is not allowed unparenthesized inside slice pattern";
                return Err(error::new2(start, end, msg));
            }
            _ => {}
        }
        elems.push_value(value);
        if content.is_empty() {
            break;
        }
        let punct = content.parse()?;
        elems.push_punct(punct);
    }

    Ok(PatSlice {
        attrs: Vec::new(),
        bracket_token,
        elems,
    })
}
|
||||
|
||||
// Parse an inline-const pattern `const { ... }`. The contents are validated
// as a block but the whole construct is returned only as its raw token
// stream, for the caller to wrap in `Pat::Verbatim`.
fn pat_const(input: ParseStream) -> Result<TokenStream> {
    let begin = input.fork();
    input.parse::<Token![const]>()?;

    let content;
    braced!(content in input);
    content.call(Attribute::parse_inner)?;
    content.call(Block::parse_within)?;

    Ok(verbatim::between(&begin, input))
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    //! `ToTokens` impls for the pattern AST nodes. Each impl emits the
    //! node's outer attributes first, then its tokens in source order.

    use crate::attr::FilterAttrs;
    use crate::pat::{
        FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct,
        PatTuple, PatTupleStruct, PatType, PatWild,
    };
    use crate::path;
    use crate::path::printing::PathStyle;
    use proc_macro2::TokenStream;
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatIdent {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.by_ref.to_tokens(tokens);
            self.mutability.to_tokens(tokens);
            self.ident.to_tokens(tokens);
            if let Some((at_token, subpat)) = &self.subpat {
                at_token.to_tokens(tokens);
                subpat.to_tokens(tokens);
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatOr {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.leading_vert.to_tokens(tokens);
            self.cases.to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatParen {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.paren_token.surround(tokens, |tokens| {
                self.pat.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatReference {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.and_token.to_tokens(tokens);
            self.mutability.to_tokens(tokens);
            self.pat.to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatRest {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.dot2_token.to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatSlice {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.bracket_token.surround(tokens, |tokens| {
                self.elems.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatStruct {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            path::printing::print_qpath(tokens, &self.qself, &self.path, PathStyle::Expr);
            self.brace_token.surround(tokens, |tokens| {
                self.fields.to_tokens(tokens);
                // NOTE: We need a comma before the dot2 token if it is present.
                if !self.fields.empty_or_trailing() && self.rest.is_some() {
                    <Token![,]>::default().to_tokens(tokens);
                }
                self.rest.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatTuple {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.paren_token.surround(tokens, |tokens| {
                self.elems.to_tokens(tokens);
                // If there is only one element, a trailing comma is needed to
                // distinguish PatTuple from PatParen, unless this is `(..)`
                // which is a tuple pattern even without comma.
                if self.elems.len() == 1
                    && !self.elems.trailing_punct()
                    && !matches!(self.elems[0], Pat::Rest { .. })
                {
                    <Token![,]>::default().to_tokens(tokens);
                }
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatTupleStruct {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            path::printing::print_qpath(tokens, &self.qself, &self.path, PathStyle::Expr);
            self.paren_token.surround(tokens, |tokens| {
                self.elems.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatType {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.pat.to_tokens(tokens);
            self.colon_token.to_tokens(tokens);
            self.ty.to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PatWild {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            self.underscore_token.to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for FieldPat {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(self.attrs.outer());
            // The member and colon are printed only for the explicit
            // `member: pat` form; shorthand fields print just the pattern.
            if let Some(colon_token) = &self.colon_token {
                self.member.to_tokens(tokens);
                colon_token.to_tokens(tokens);
            }
            self.pat.to_tokens(tokens);
        }
    }
}
|
||||
|
|
@ -0,0 +1,968 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::error::Result;
|
||||
use crate::expr::Expr;
|
||||
use crate::generics::TypeParamBound;
|
||||
use crate::ident::Ident;
|
||||
use crate::lifetime::Lifetime;
|
||||
use crate::punctuated::Punctuated;
|
||||
use crate::token;
|
||||
use crate::ty::{ReturnType, Type};
|
||||
|
||||
ast_struct! {
    /// A path at which a named item is exported (e.g. `std::collections::HashMap`).
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Path {
        /// The `::` in front of a fully qualified path, if any.
        pub leading_colon: Option<Token![::]>,
        /// The `::`-separated segments of the path.
        pub segments: Punctuated<PathSegment, Token![::]>,
    }
}
|
||||
|
||||
impl<T> From<T> for Path
|
||||
where
|
||||
T: Into<PathSegment>,
|
||||
{
|
||||
fn from(segment: T) -> Self {
|
||||
let mut path = Path {
|
||||
leading_colon: None,
|
||||
segments: Punctuated::new(),
|
||||
};
|
||||
path.segments.push_value(segment.into());
|
||||
path
|
||||
}
|
||||
}
|
||||
|
||||
impl Path {
    /// Determines whether this is a path of length 1 equal to the given
    /// ident.
    ///
    /// For them to compare equal, it must be the case that:
    ///
    /// - the path has no leading colon,
    /// - the number of path segments is 1,
    /// - the first path segment has no angle bracketed or parenthesized
    ///   path arguments, and
    /// - the ident of the first path segment is equal to the given one.
    ///
    /// # Example
    ///
    /// ```
    /// use proc_macro2::TokenStream;
    /// use syn::{Attribute, Error, Meta, Result};
    ///
    /// fn get_serde_meta_item(attr: &Attribute) -> Result<Option<&TokenStream>> {
    ///     if attr.path().is_ident("serde") {
    ///         match &attr.meta {
    ///             Meta::List(meta) => Ok(Some(&meta.tokens)),
    ///             bad => Err(Error::new_spanned(bad, "unrecognized attribute")),
    ///         }
    ///     } else {
    ///         Ok(None)
    ///     }
    /// }
    /// ```
    pub fn is_ident<I>(&self, ident: &I) -> bool
    where
        I: ?Sized,
        Ident: PartialEq<I>,
    {
        match self.get_ident() {
            Some(id) => id == ident,
            None => false,
        }
    }

    /// If this path consists of a single ident, returns the ident.
    ///
    /// A path is considered an ident if:
    ///
    /// - the path has no leading colon,
    /// - the number of path segments is 1, and
    /// - the first path segment has no angle bracketed or parenthesized
    ///   path arguments.
    pub fn get_ident(&self) -> Option<&Ident> {
        if self.leading_colon.is_none()
            && self.segments.len() == 1
            && self.segments[0].arguments.is_none()
        {
            Some(&self.segments[0].ident)
        } else {
            None
        }
    }

    /// An error if this path is not a single ident, as defined in `get_ident`.
    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn require_ident(&self) -> Result<&Ident> {
        self.get_ident().ok_or_else(|| {
            // Span the error over the whole path, from the first segment's
            // ident to the last segment's ident.
            crate::error::new2(
                self.segments.first().unwrap().ident.span(),
                self.segments.last().unwrap().ident.span(),
                "expected this path to be an identifier",
            )
        })
    }
}
|
||||
|
||||
ast_struct! {
    /// A segment of a path together with any path arguments on that segment.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct PathSegment {
        /// The segment's identifier.
        pub ident: Ident,
        /// Generic arguments attached to this segment, if any.
        pub arguments: PathArguments,
    }
}
|
||||
|
||||
impl<T> From<T> for PathSegment
|
||||
where
|
||||
T: Into<Ident>,
|
||||
{
|
||||
fn from(ident: T) -> Self {
|
||||
PathSegment {
|
||||
ident: ident.into(),
|
||||
arguments: PathArguments::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast_enum! {
    /// Angle bracketed or parenthesized arguments of a path segment.
    ///
    /// ## Angle bracketed
    ///
    /// The `<'a, T>` in `std::slice::iter<'a, T>`.
    ///
    /// ## Parenthesized
    ///
    /// The `(A, B) -> C` in `Fn(A, B) -> C`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum PathArguments {
        /// No arguments on this segment.
        None,
        /// The `<'a, T>` in `std::slice::iter<'a, T>`.
        AngleBracketed(AngleBracketedGenericArguments),
        /// The `(A, B) -> C` in `Fn(A, B) -> C`.
        Parenthesized(ParenthesizedGenericArguments),
    }
}
|
||||
|
||||
impl Default for PathArguments {
|
||||
fn default() -> Self {
|
||||
PathArguments::None
|
||||
}
|
||||
}
|
||||
|
||||
impl PathArguments {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
PathArguments::None => true,
|
||||
PathArguments::AngleBracketed(bracketed) => bracketed.args.is_empty(),
|
||||
PathArguments::Parenthesized(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_none(&self) -> bool {
|
||||
match self {
|
||||
PathArguments::None => true,
|
||||
PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast_enum! {
    /// An individual generic argument, like `'a`, `T`, or `Item = T`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    #[non_exhaustive]
    pub enum GenericArgument {
        /// A lifetime argument.
        Lifetime(Lifetime),
        /// A type argument.
        Type(Type),
        /// A const expression. Must be inside of a block.
        ///
        /// NOTE: Identity expressions are represented as Type arguments, as
        /// they are indistinguishable syntactically.
        Const(Expr),
        /// A binding (equality constraint) on an associated type: the `Item =
        /// u8` in `Iterator<Item = u8>`.
        AssocType(AssocType),
        /// An equality constraint on an associated constant: the `PANIC =
        /// false` in `Trait<PANIC = false>`.
        AssocConst(AssocConst),
        /// An associated type bound: `Iterator<Item: Display>`.
        Constraint(Constraint),
    }
}
|
||||
|
||||
ast_struct! {
    /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
    /// V>`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct AngleBracketedGenericArguments {
        /// The `::` of a turbofish (`Vec::<u8>`), if present.
        pub colon2_token: Option<Token![::]>,
        pub lt_token: Token![<],
        pub args: Punctuated<GenericArgument, Token![,]>,
        pub gt_token: Token![>],
    }
}
|
||||
|
||||
ast_struct! {
    /// A binding (equality constraint) on an associated type: the `Item = u8`
    /// in `Iterator<Item = u8>`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct AssocType {
        pub ident: Ident,
        /// Generic arguments on the associated type itself, as in
        /// generic associated types.
        pub generics: Option<AngleBracketedGenericArguments>,
        pub eq_token: Token![=],
        pub ty: Type,
    }
}
|
||||
|
||||
ast_struct! {
    /// An equality constraint on an associated constant: the `PANIC = false` in
    /// `Trait<PANIC = false>`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct AssocConst {
        pub ident: Ident,
        /// Generic arguments on the associated constant itself, if any.
        pub generics: Option<AngleBracketedGenericArguments>,
        pub eq_token: Token![=],
        pub value: Expr,
    }
}
|
||||
|
||||
ast_struct! {
    /// An associated type bound: `Iterator<Item: Display>`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Constraint {
        pub ident: Ident,
        /// Generic arguments on the associated type itself, if any.
        pub generics: Option<AngleBracketedGenericArguments>,
        pub colon_token: Token![:],
        /// The `+`-separated bounds after the colon.
        pub bounds: Punctuated<TypeParamBound, Token![+]>,
    }
}
|
||||
|
||||
ast_struct! {
    /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
    /// C`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct ParenthesizedGenericArguments {
        pub paren_token: token::Paren,
        /// `(A, B)`
        pub inputs: Punctuated<Type, Token![,]>,
        /// `C`
        pub output: ReturnType,
    }
}
|
||||
|
||||
ast_struct! {
    /// The explicit Self type in a qualified path: the `T` in `<T as
    /// Display>::fmt`.
    ///
    /// The actual path, including the trait and the associated item, is stored
    /// separately. The `position` field represents the index of the associated
    /// item qualified with this Self type.
    ///
    /// ```text
    /// <Vec<T> as a::b::Trait>::AssociatedItem
    ///  ^~~~~~    ~~~~~~~~~~~~~~^
    ///  ty        position = 3
    ///
    /// <Vec<T>>::AssociatedItem
    ///  ^~~~~~   ^
    ///  ty       position = 0
    /// ```
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct QSelf {
        pub lt_token: Token![<],
        pub ty: Box<Type>,
        /// Number of leading path segments that belong to the trait part
        /// of the path (see the diagram above).
        pub position: usize,
        pub as_token: Option<Token![as]>,
        pub gt_token: Token![>],
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub(crate) mod parsing {
|
||||
use crate::error::Result;
|
||||
#[cfg(feature = "full")]
|
||||
use crate::expr::ExprBlock;
|
||||
use crate::expr::{Expr, ExprPath};
|
||||
use crate::ext::IdentExt as _;
|
||||
#[cfg(feature = "full")]
|
||||
use crate::generics::TypeParamBound;
|
||||
use crate::ident::Ident;
|
||||
use crate::lifetime::Lifetime;
|
||||
use crate::lit::Lit;
|
||||
use crate::parse::{Parse, ParseStream};
|
||||
#[cfg(feature = "full")]
|
||||
use crate::path::Constraint;
|
||||
use crate::path::{
|
||||
AngleBracketedGenericArguments, AssocConst, AssocType, GenericArgument,
|
||||
ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
|
||||
};
|
||||
use crate::punctuated::Punctuated;
|
||||
use crate::token;
|
||||
use crate::ty::{ReturnType, Type};
|
||||
#[cfg(not(feature = "full"))]
|
||||
use crate::verbatim;
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
impl Parse for Path {
    // Ordinary (type-position) path parsing; `expr_style = false` allows
    // generic arguments without a turbofish.
    fn parse(input: ParseStream) -> Result<Self> {
        Self::parse_helper(input, false)
    }
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
impl Parse for GenericArgument {
    // Disambiguate the six kinds of generic argument. Lifetimes and
    // obviously-const arguments are handled up front; everything else is
    // first parsed as a type and then reinterpreted as an associated type
    // binding, associated const binding, or constraint when an `=` or `:`
    // follows a plain single-segment path.
    fn parse(input: ParseStream) -> Result<Self> {
        // A lone lifetime is a lifetime argument; `'a + ...` would instead
        // be the start of a bound, so it is excluded here.
        if input.peek(Lifetime) && !input.peek2(Token![+]) {
            return Ok(GenericArgument::Lifetime(input.parse()?));
        }

        // Literals and braced blocks can only be const arguments.
        if input.peek(Lit) || input.peek(token::Brace) {
            return const_argument(input).map(GenericArgument::Const);
        }

        let mut argument: Type = input.parse()?;

        match argument {
            // Only an unqualified, single-segment path with at most angle
            // bracketed arguments can be the `Ident` of `Ident = ...` or
            // `Ident: ...` forms.
            Type::Path(mut ty)
                if ty.qself.is_none()
                    && ty.path.leading_colon.is_none()
                    && ty.path.segments.len() == 1
                    && match &ty.path.segments[0].arguments {
                        PathArguments::None | PathArguments::AngleBracketed(_) => true,
                        PathArguments::Parenthesized(_) => false,
                    } =>
            {
                if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
                    let segment = ty.path.segments.pop().unwrap().into_value();
                    let ident = segment.ident;
                    let generics = match segment.arguments {
                        PathArguments::None => None,
                        PathArguments::AngleBracketed(arguments) => Some(arguments),
                        // Excluded by the guard above.
                        PathArguments::Parenthesized(_) => unreachable!(),
                    };
                    // `Ident = <literal or block>` is an associated const
                    // binding; `Ident = <type>` is an associated type binding.
                    return if input.peek(Lit) || input.peek(token::Brace) {
                        Ok(GenericArgument::AssocConst(AssocConst {
                            ident,
                            generics,
                            eq_token,
                            value: const_argument(input)?,
                        }))
                    } else {
                        Ok(GenericArgument::AssocType(AssocType {
                            ident,
                            generics,
                            eq_token,
                            ty: input.parse()?,
                        }))
                    };
                }

                // `Ident: Bound + ...` is an associated type constraint
                // (full syntax only).
                #[cfg(feature = "full")]
                if let Some(colon_token) = input.parse::<Option<Token![:]>>()? {
                    let segment = ty.path.segments.pop().unwrap().into_value();
                    return Ok(GenericArgument::Constraint(Constraint {
                        ident: segment.ident,
                        generics: match segment.arguments {
                            PathArguments::None => None,
                            PathArguments::AngleBracketed(arguments) => Some(arguments),
                            PathArguments::Parenthesized(_) => unreachable!(),
                        },
                        colon_token,
                        bounds: {
                            // Bounds end at the `,` or `>` that closes the
                            // argument list.
                            let mut bounds = Punctuated::new();
                            loop {
                                if input.peek(Token![,]) || input.peek(Token![>]) {
                                    break;
                                }
                                bounds.push_value({
                                    let allow_precise_capture = false;
                                    let allow_const = true;
                                    TypeParamBound::parse_single(
                                        input,
                                        allow_precise_capture,
                                        allow_const,
                                    )?
                                });
                                if !input.peek(Token![+]) {
                                    break;
                                }
                                let punct: Token![+] = input.parse()?;
                                bounds.push_punct(punct);
                            }
                            bounds
                        },
                    }));
                }

                // Neither `=` nor `:` followed: the path really was a type.
                argument = Type::Path(ty);
            }
            _ => {}
        }

        Ok(GenericArgument::Type(argument))
    }
}
|
||||
|
||||
// Parse a const generic argument: a literal, a bare identifier referring to
// a const, or a braced block.
pub(crate) fn const_argument(input: ParseStream) -> Result<Expr> {
    // Lookahead created up front so a failure reports all accepted tokens.
    let lookahead = input.lookahead1();

    if input.peek(Lit) {
        let lit = input.parse()?;
        return Ok(Expr::Lit(lit));
    }

    if input.peek(Ident) {
        // A single ident naming a const parameter or item.
        let ident: Ident = input.parse()?;
        return Ok(Expr::Path(ExprPath {
            attrs: Vec::new(),
            qself: None,
            path: Path::from(ident),
        }));
    }

    if input.peek(token::Brace) {
        // With "full" syntax, a const block is parsed as a real block
        // expression.
        #[cfg(feature = "full")]
        {
            let block: ExprBlock = input.parse()?;
            return Ok(Expr::Block(block));
        }

        // Without "full", validate that the braces contain an expression but
        // keep the raw tokens verbatim.
        #[cfg(not(feature = "full"))]
        {
            let begin = input.fork();
            let content;
            braced!(content in input);
            content.parse::<Expr>()?;
            let verbatim = verbatim::between(&begin, input);
            return Ok(Expr::Verbatim(verbatim));
        }
    }

    Err(lookahead.error())
}
|
||||
|
||||
impl AngleBracketedGenericArguments {
    /// Parse `::<…>` with mandatory leading `::`.
    ///
    /// The ordinary [`Parse`] impl for `AngleBracketedGenericArguments`
    /// parses optional leading `::`.
    #[cfg(feature = "full")]
    #[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "full"))))]
    pub fn parse_turbofish(input: ParseStream) -> Result<Self> {
        let colon2_token: Token![::] = input.parse()?;
        Self::do_parse(Some(colon2_token), input)
    }

    // Shared worker: parse `<...>` given an already-parsed (optional) `::`.
    pub(crate) fn do_parse(
        colon2_token: Option<Token![::]>,
        input: ParseStream,
    ) -> Result<Self> {
        Ok(AngleBracketedGenericArguments {
            colon2_token,
            lt_token: input.parse()?,
            args: {
                // Comma-separated arguments, terminated by `>`; a trailing
                // comma before the `>` is accepted.
                let mut args = Punctuated::new();
                loop {
                    if input.peek(Token![>]) {
                        break;
                    }
                    let value: GenericArgument = input.parse()?;
                    args.push_value(value);
                    if input.peek(Token![>]) {
                        break;
                    }
                    let punct: Token![,] = input.parse()?;
                    args.push_punct(punct);
                }
                args
            },
            gt_token: input.parse()?,
        })
    }
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
impl Parse for AngleBracketedGenericArguments {
    // Optional leading `::` (i.e. the turbofish form is accepted but not
    // required), then `<...>`.
    fn parse(input: ParseStream) -> Result<Self> {
        let colon2_token: Option<Token![::]> = input.parse()?;
        Self::do_parse(colon2_token, input)
    }
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
impl Parse for ParenthesizedGenericArguments {
    // Parse `(A, B) -> C` style arguments. The return type must not use a
    // `+` bound, hence `ReturnType::without_plus`.
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(ParenthesizedGenericArguments {
            paren_token: parenthesized!(content in input),
            inputs: content.parse_terminated(Type::parse, Token![,])?,
            output: input.call(ReturnType::without_plus)?,
        })
    }
}
|
||||
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
impl Parse for PathSegment {
    // Type-position segment parsing (`expr_style = false`).
    fn parse(input: ParseStream) -> Result<Self> {
        Self::parse_helper(input, false)
    }
}
|
||||
|
||||
impl PathSegment {
    // Parse a single path segment. In `expr_style` mode a bare `<` is not
    // treated as the start of generic arguments (expressions require a
    // turbofish `::<`).
    fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
        // Path keywords (`super`, `self`, `crate`, and — with "full" —
        // `try`) form argument-less segments and need the any-ident parser.
        if input.peek(Token![super])
            || input.peek(Token![self])
            || input.peek(Token![crate])
            || cfg!(feature = "full") && input.peek(Token![try])
        {
            let ident = input.call(Ident::parse_any)?;
            return Ok(PathSegment::from(ident));
        }

        // `Self` is also a keyword but may carry generic arguments, so it
        // falls through to the argument handling below.
        let ident = if input.peek(Token![Self]) {
            input.call(Ident::parse_any)?
        } else {
            input.parse()?
        };

        // `<` starts generic arguments in type position (but not when it is
        // really `<=` or `<<=`); `::` followed by `<` is a turbofish and is
        // accepted in both styles.
        if !expr_style
            && input.peek(Token![<])
            && !input.peek(Token![<=])
            && !input.peek(Token![<<=])
            || input.peek(Token![::]) && input.peek3(Token![<])
        {
            Ok(PathSegment {
                ident,
                arguments: PathArguments::AngleBracketed(input.parse()?),
            })
        } else {
            Ok(PathSegment::from(ident))
        }
    }
}
|
||||
|
||||
impl Path {
    /// Parse a `Path` containing no path arguments on any of its segments.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{Path, Result, Token};
    /// use syn::parse::{Parse, ParseStream};
    ///
    /// // A simplified single `use` statement like:
    /// //
    /// //     use std::collections::HashMap;
    /// //
    /// // Note that generic parameters are not allowed in a `use` statement
    /// // so the following must not be accepted.
    /// //
    /// //     use a::<b>::c;
    /// struct SingleUse {
    ///     use_token: Token![use],
    ///     path: Path,
    /// }
    ///
    /// impl Parse for SingleUse {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         Ok(SingleUse {
    ///             use_token: input.parse()?,
    ///             path: input.call(Path::parse_mod_style)?,
    ///         })
    ///     }
    /// }
    /// ```
    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    pub fn parse_mod_style(input: ParseStream) -> Result<Self> {
        Ok(Path {
            leading_colon: input.parse()?,
            segments: {
                let mut segments = Punctuated::new();
                loop {
                    // Mod-style segments are plain idents or path keywords;
                    // anything else ends the path.
                    if !input.peek(Ident)
                        && !input.peek(Token![super])
                        && !input.peek(Token![self])
                        && !input.peek(Token![Self])
                        && !input.peek(Token![crate])
                    {
                        break;
                    }
                    let ident = Ident::parse_any(input)?;
                    segments.push_value(PathSegment::from(ident));
                    if !input.peek(Token![::]) {
                        break;
                    }
                    let punct = input.parse()?;
                    segments.push_punct(punct);
                }
                if segments.is_empty() {
                    // No segment at all: reuse the ident parser's error,
                    // which points at the unexpected token.
                    return Err(input.parse::<Ident>().unwrap_err());
                } else if segments.trailing_punct() {
                    return Err(input.error("expected path segment after `::`"));
                }
                segments
            },
        })
    }

    // Parse a full path (leading `::`, then one or more segments). The
    // `expr_style` flag is threaded through to segment parsing to control
    // turbofish handling.
    pub(crate) fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
        let mut path = Path {
            leading_colon: input.parse()?,
            segments: {
                let mut segments = Punctuated::new();
                let value = PathSegment::parse_helper(input, expr_style)?;
                segments.push_value(value);
                segments
            },
        };
        Path::parse_rest(input, &mut path, expr_style)?;
        Ok(path)
    }

    // Continue parsing `::segment` pairs onto an existing path. Stops
    // before `::(`, which is a parenthesized-arguments form handled by the
    // caller rather than a new segment.
    pub(crate) fn parse_rest(
        input: ParseStream,
        path: &mut Self,
        expr_style: bool,
    ) -> Result<()> {
        while input.peek(Token![::]) && !input.peek3(token::Paren) {
            let punct: Token![::] = input.parse()?;
            path.segments.push_punct(punct);
            let value = PathSegment::parse_helper(input, expr_style)?;
            path.segments.push_value(value);
        }
        Ok(())
    }

    // Whether every segment is argument-free, i.e. the path could appear in
    // a `use` statement.
    pub(crate) fn is_mod_style(&self) -> bool {
        self.segments
            .iter()
            .all(|segment| segment.arguments.is_none())
    }
}
|
||||
|
||||
// Parse an optionally qualified path: either `<Type as Trait>::rest...`
// (returning the `QSelf` qualifier plus the combined path) or a plain
// path (returning `None` for the qualifier).
pub(crate) fn qpath(input: ParseStream, expr_style: bool) -> Result<(Option<QSelf>, Path)> {
    if input.peek(Token![<]) {
        // Qualified form: `<` Type (`as` Path)? `>` `::` segments...
        let lt_token: Token![<] = input.parse()?;
        let this: Type = input.parse()?;
        let path = if input.peek(Token![as]) {
            let as_token: Token![as] = input.parse()?;
            let path: Path = input.parse()?;
            Some((as_token, path))
        } else {
            None
        };
        let gt_token: Token![>] = input.parse()?;
        let colon2_token: Token![::] = input.parse()?;
        // Segments that follow the closing `>`.
        let mut rest = Punctuated::new();
        loop {
            let path = PathSegment::parse_helper(input, expr_style)?;
            rest.push_value(path);
            if !input.peek(Token![::]) {
                break;
            }
            let punct: Token![::] = input.parse()?;
            rest.push_punct(punct);
        }
        let (position, as_token, path) = match path {
            Some((as_token, mut path)) => {
                // `position` records how many leading segments belong to
                // the trait path inside the `<...>` qualifier.
                let pos = path.segments.len();
                path.segments.push_punct(colon2_token);
                path.segments.extend(rest.into_pairs());
                (pos, Some(as_token), path)
            }
            None => {
                // No `as Trait`: the whole path is what follows `>::`.
                let path = Path {
                    leading_colon: Some(colon2_token),
                    segments: rest,
                };
                (0, None, path)
            }
        };
        let qself = QSelf {
            lt_token,
            ty: Box::new(this),
            position,
            as_token,
            gt_token,
        };
        Ok((Some(qself), path))
    } else {
        // Unqualified path.
        let path = Path::parse_helper(input, expr_style)?;
        Ok((None, path))
    }
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
pub(crate) mod printing {
    use crate::generics;
    use crate::path::{
        AngleBracketedGenericArguments, AssocConst, AssocType, Constraint, GenericArgument,
        ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
    };
    use crate::print::TokensOrDefault;
    #[cfg(feature = "parsing")]
    use crate::spanned::Spanned;
    #[cfg(feature = "parsing")]
    use proc_macro2::Span;
    use proc_macro2::TokenStream;
    use quote::ToTokens;
    use std::cmp;

    // How path arguments should be printed:
    // - Expr: expression position — a turbofish `::` is forced before `<...>`.
    // - Mod: mod-style — generic/parenthesized arguments are suppressed.
    // - AsWritten: print exactly the tokens stored in the syntax tree.
    pub(crate) enum PathStyle {
        Expr,
        Mod,
        AsWritten,
    }

    impl Copy for PathStyle {}

    impl Clone for PathStyle {
        fn clone(&self) -> Self {
            *self
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Path {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            print_path(tokens, self, PathStyle::AsWritten);
        }
    }

    // Print a full path: optional leading `::`, then each segment with its
    // separating `::` punctuation.
    pub(crate) fn print_path(tokens: &mut TokenStream, path: &Path, style: PathStyle) {
        path.leading_colon.to_tokens(tokens);
        for segment in path.segments.pairs() {
            print_path_segment(tokens, segment.value(), style);
            segment.punct().to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PathSegment {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            print_path_segment(tokens, self, PathStyle::AsWritten);
        }
    }

    // Print one segment: identifier followed by its (style-dependent)
    // path arguments.
    fn print_path_segment(tokens: &mut TokenStream, segment: &PathSegment, style: PathStyle) {
        segment.ident.to_tokens(tokens);
        print_path_arguments(tokens, &segment.arguments, style);
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for PathArguments {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            print_path_arguments(tokens, self, PathStyle::AsWritten);
        }
    }

    // Dispatch on the argument kind; `None` prints nothing.
    fn print_path_arguments(tokens: &mut TokenStream, arguments: &PathArguments, style: PathStyle) {
        match arguments {
            PathArguments::None => {}
            PathArguments::AngleBracketed(arguments) => {
                print_angle_bracketed_generic_arguments(tokens, arguments, style);
            }
            PathArguments::Parenthesized(arguments) => {
                print_parenthesized_generic_arguments(tokens, arguments, style);
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for GenericArgument {
        #[allow(clippy::match_same_arms)]
        fn to_tokens(&self, tokens: &mut TokenStream) {
            match self {
                GenericArgument::Lifetime(lt) => lt.to_tokens(tokens),
                GenericArgument::Type(ty) => ty.to_tokens(tokens),
                GenericArgument::Const(expr) => {
                    // Const arguments may need surrounding braces; delegate
                    // to the shared helper in `generics::printing`.
                    generics::printing::print_const_argument(expr, tokens);
                }
                GenericArgument::AssocType(assoc) => assoc.to_tokens(tokens),
                GenericArgument::AssocConst(assoc) => assoc.to_tokens(tokens),
                GenericArgument::Constraint(constraint) => constraint.to_tokens(tokens),
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for AngleBracketedGenericArguments {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            print_angle_bracketed_generic_arguments(tokens, self, PathStyle::AsWritten);
        }
    }

    // Print `<...>` generic arguments. Mod style prints nothing at all;
    // Expr style forces a turbofish `::` before the `<`.
    pub(crate) fn print_angle_bracketed_generic_arguments(
        tokens: &mut TokenStream,
        arguments: &AngleBracketedGenericArguments,
        style: PathStyle,
    ) {
        if let PathStyle::Mod = style {
            return;
        }

        conditionally_print_turbofish(tokens, &arguments.colon2_token, style);
        arguments.lt_token.to_tokens(tokens);

        // Print lifetimes before types/consts/bindings, regardless of their
        // order in args.
        let mut trailing_or_empty = true;
        for param in arguments.args.pairs() {
            match param.value() {
                GenericArgument::Lifetime(_) => {
                    param.to_tokens(tokens);
                    trailing_or_empty = param.punct().is_some();
                }
                GenericArgument::Type(_)
                | GenericArgument::Const(_)
                | GenericArgument::AssocType(_)
                | GenericArgument::AssocConst(_)
                | GenericArgument::Constraint(_) => {}
            }
        }
        // Second pass: everything that is not a lifetime, inserting a comma
        // if the previous printed element lacked trailing punctuation.
        for param in arguments.args.pairs() {
            match param.value() {
                GenericArgument::Type(_)
                | GenericArgument::Const(_)
                | GenericArgument::AssocType(_)
                | GenericArgument::AssocConst(_)
                | GenericArgument::Constraint(_) => {
                    if !trailing_or_empty {
                        <Token![,]>::default().to_tokens(tokens);
                    }
                    param.to_tokens(tokens);
                    trailing_or_empty = param.punct().is_some();
                }
                GenericArgument::Lifetime(_) => {}
            }
        }

        arguments.gt_token.to_tokens(tokens);
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for AssocType {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `Ident<generics> = Type` binding form.
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            self.eq_token.to_tokens(tokens);
            self.ty.to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for AssocConst {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `Ident<generics> = const-expr` binding form.
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            self.eq_token.to_tokens(tokens);
            generics::printing::print_const_argument(&self.value, tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Constraint {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `Ident<generics>: bounds` constraint form.
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            self.colon_token.to_tokens(tokens);
            self.bounds.to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for ParenthesizedGenericArguments {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            print_parenthesized_generic_arguments(tokens, self, PathStyle::AsWritten);
        }
    }

    // Print `(inputs) -> output` arguments (Fn-trait sugar). Suppressed
    // entirely in Mod style, like the angle-bracketed form.
    fn print_parenthesized_generic_arguments(
        tokens: &mut TokenStream,
        arguments: &ParenthesizedGenericArguments,
        style: PathStyle,
    ) {
        if let PathStyle::Mod = style {
            return;
        }

        // Parenthesized arguments never carry their own `::`, hence `&None`.
        conditionally_print_turbofish(tokens, &None, style);
        arguments.paren_token.surround(tokens, |tokens| {
            arguments.inputs.to_tokens(tokens);
        });
        arguments.output.to_tokens(tokens);
    }

    // Print a possibly-qualified path `<Ty as Trait>::rest`. The first
    // `qself.position` segments are printed inside the qualifier (after
    // `as`), the remainder after the closing `>`.
    pub(crate) fn print_qpath(
        tokens: &mut TokenStream,
        qself: &Option<QSelf>,
        path: &Path,
        style: PathStyle,
    ) {
        let qself = match qself {
            Some(qself) => qself,
            None => {
                // Unqualified: plain path printing.
                print_path(tokens, path, style);
                return;
            }
        };
        qself.lt_token.to_tokens(tokens);
        qself.ty.to_tokens(tokens);

        // Clamp position so a malformed tree cannot overrun the segments.
        let pos = cmp::min(qself.position, path.segments.len());
        let mut segments = path.segments.pairs();
        if pos > 0 {
            // `as Trait` part: default the `as` token if absent, then print
            // the trait segments, closing with `>` after the last one.
            TokensOrDefault(&qself.as_token).to_tokens(tokens);
            path.leading_colon.to_tokens(tokens);
            for (i, segment) in segments.by_ref().take(pos).enumerate() {
                print_path_segment(tokens, segment.value(), PathStyle::AsWritten);
                if i + 1 == pos {
                    qself.gt_token.to_tokens(tokens);
                }
                segment.punct().to_tokens(tokens);
            }
        } else {
            qself.gt_token.to_tokens(tokens);
            path.leading_colon.to_tokens(tokens);
        }
        // Remaining segments follow the qualifier, in the requested style.
        for segment in segments {
            print_path_segment(tokens, segment.value(), style);
            segment.punct().to_tokens(tokens);
        }
    }

    // Expr style always prints a `::` (defaulting one if the tree has none);
    // AsWritten prints only what was stored. Mod style never reaches here:
    // both callers return early for PathStyle::Mod.
    fn conditionally_print_turbofish(
        tokens: &mut TokenStream,
        colon2_token: &Option<Token![::]>,
        style: PathStyle,
    ) {
        match style {
            PathStyle::Expr => TokensOrDefault(colon2_token).to_tokens(tokens),
            PathStyle::Mod => unreachable!(),
            PathStyle::AsWritten => colon2_token.to_tokens(tokens),
        }
    }

    #[cfg(feature = "parsing")]
    #[cfg_attr(docsrs, doc(cfg(all(feature = "parsing", feature = "printing"))))]
    impl Spanned for QSelf {
        fn span(&self) -> Span {
            // The span of a qualified-self is taken from its `<` and `>`
            // delimiter tokens only.
            struct QSelfDelimiters<'a>(&'a QSelf);

            impl<'a> ToTokens for QSelfDelimiters<'a> {
                fn to_tokens(&self, tokens: &mut TokenStream) {
                    self.0.lt_token.to_tokens(tokens);
                    self.0.gt_token.to_tokens(tokens);
                }
            }

            QSelfDelimiters(self).span()
        }
    }
}
|
||||
|
|
@ -0,0 +1,212 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(all(feature = "printing", feature = "full"))]
|
||||
use crate::attr::{AttrStyle, Attribute};
|
||||
#[cfg(feature = "printing")]
|
||||
use crate::expr::Expr;
|
||||
#[cfg(all(feature = "printing", feature = "full"))]
|
||||
use crate::expr::{
|
||||
ExprArray, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprCall, ExprConst, ExprContinue,
|
||||
ExprField, ExprForLoop, ExprGroup, ExprIf, ExprIndex, ExprInfer, ExprLit, ExprLoop, ExprMacro,
|
||||
ExprMatch, ExprMethodCall, ExprParen, ExprPath, ExprRepeat, ExprReturn, ExprStruct, ExprTry,
|
||||
ExprTryBlock, ExprTuple, ExprUnsafe, ExprWhile, ExprYield,
|
||||
};
|
||||
use crate::op::BinOp;
|
||||
#[cfg(all(feature = "printing", feature = "full"))]
|
||||
use crate::ty::ReturnType;
|
||||
use std::cmp::Ordering;
|
||||
|
||||
// Reference: https://doc.rust-lang.org/reference/expressions.html#expression-precedence
//
// Variants are declared from lowest to highest binding strength; the
// `PartialOrd` impl below compares them by declaration order. Variants used
// only when printing are gated behind the "printing" feature.
pub(crate) enum Precedence {
    // return, break, closures
    Jump,
    // = += -= *= /= %= &= |= ^= <<= >>=
    Assign,
    // .. ..=
    Range,
    // ||
    Or,
    // &&
    And,
    // let
    #[cfg(feature = "printing")]
    Let,
    // == != < > <= >=
    Compare,
    // |
    BitOr,
    // ^
    BitXor,
    // &
    BitAnd,
    // << >>
    Shift,
    // + -
    Sum,
    // * / %
    Product,
    // as
    Cast,
    // unary - * ! & &mut
    #[cfg(feature = "printing")]
    Prefix,
    // paths, loops, function calls, array indexing, field expressions, method calls
    #[cfg(feature = "printing")]
    Unambiguous,
}
|
||||
|
||||
impl Precedence {
    // The weakest-binding precedence (first declared variant).
    pub(crate) const MIN: Self = Precedence::Jump;

    // Precedence of a binary operator.
    pub(crate) fn of_binop(op: &BinOp) -> Self {
        match op {
            BinOp::Add(_) | BinOp::Sub(_) => Precedence::Sum,
            BinOp::Mul(_) | BinOp::Div(_) | BinOp::Rem(_) => Precedence::Product,
            BinOp::And(_) => Precedence::And,
            BinOp::Or(_) => Precedence::Or,
            BinOp::BitXor(_) => Precedence::BitXor,
            BinOp::BitAnd(_) => Precedence::BitAnd,
            BinOp::BitOr(_) => Precedence::BitOr,
            BinOp::Shl(_) | BinOp::Shr(_) => Precedence::Shift,

            BinOp::Eq(_)
            | BinOp::Lt(_)
            | BinOp::Le(_)
            | BinOp::Ne(_)
            | BinOp::Ge(_)
            | BinOp::Gt(_) => Precedence::Compare,

            BinOp::AddAssign(_)
            | BinOp::SubAssign(_)
            | BinOp::MulAssign(_)
            | BinOp::DivAssign(_)
            | BinOp::RemAssign(_)
            | BinOp::BitXorAssign(_)
            | BinOp::BitAndAssign(_)
            | BinOp::BitOrAssign(_)
            | BinOp::ShlAssign(_)
            | BinOp::ShrAssign(_) => Precedence::Assign,
        }
    }

    // Precedence of an arbitrary expression, used when printing to decide
    // whether parentheses are required.
    #[cfg(feature = "printing")]
    pub(crate) fn of(e: &Expr) -> Self {
        // An outer attribute on an expression gives it Prefix precedence;
        // otherwise the expression itself is Unambiguous.
        #[cfg(feature = "full")]
        fn prefix_attrs(attrs: &[Attribute]) -> Precedence {
            for attr in attrs {
                if let AttrStyle::Outer = attr.style {
                    return Precedence::Prefix;
                }
            }
            Precedence::Unambiguous
        }

        match e {
            // A closure with no declared return type extends to the end of
            // the expression (Jump); with one, it ends at its body block.
            #[cfg(feature = "full")]
            Expr::Closure(e) => match e.output {
                ReturnType::Default => Precedence::Jump,
                ReturnType::Type(..) => prefix_attrs(&e.attrs),
            },

            // break/return/yield only bind loosely when they carry a value.
            #[cfg(feature = "full")]
            Expr::Break(ExprBreak { expr, .. })
            | Expr::Return(ExprReturn { expr, .. })
            | Expr::Yield(ExprYield { expr, .. }) => match expr {
                Some(_) => Precedence::Jump,
                None => Precedence::Unambiguous,
            },

            Expr::Assign(_) => Precedence::Assign,
            Expr::Range(_) => Precedence::Range,
            Expr::Binary(e) => Precedence::of_binop(&e.op),
            Expr::Let(_) => Precedence::Let,
            Expr::Cast(_) => Precedence::Cast,
            Expr::RawAddr(_) | Expr::Reference(_) | Expr::Unary(_) => Precedence::Prefix,

            // With "full", these are Unambiguous unless an outer attribute
            // forces Prefix (see prefix_attrs above).
            #[cfg(feature = "full")]
            Expr::Array(ExprArray { attrs, .. })
            | Expr::Async(ExprAsync { attrs, .. })
            | Expr::Await(ExprAwait { attrs, .. })
            | Expr::Block(ExprBlock { attrs, .. })
            | Expr::Call(ExprCall { attrs, .. })
            | Expr::Const(ExprConst { attrs, .. })
            | Expr::Continue(ExprContinue { attrs, .. })
            | Expr::Field(ExprField { attrs, .. })
            | Expr::ForLoop(ExprForLoop { attrs, .. })
            | Expr::Group(ExprGroup { attrs, .. })
            | Expr::If(ExprIf { attrs, .. })
            | Expr::Index(ExprIndex { attrs, .. })
            | Expr::Infer(ExprInfer { attrs, .. })
            | Expr::Lit(ExprLit { attrs, .. })
            | Expr::Loop(ExprLoop { attrs, .. })
            | Expr::Macro(ExprMacro { attrs, .. })
            | Expr::Match(ExprMatch { attrs, .. })
            | Expr::MethodCall(ExprMethodCall { attrs, .. })
            | Expr::Paren(ExprParen { attrs, .. })
            | Expr::Path(ExprPath { attrs, .. })
            | Expr::Repeat(ExprRepeat { attrs, .. })
            | Expr::Struct(ExprStruct { attrs, .. })
            | Expr::Try(ExprTry { attrs, .. })
            | Expr::TryBlock(ExprTryBlock { attrs, .. })
            | Expr::Tuple(ExprTuple { attrs, .. })
            | Expr::Unsafe(ExprUnsafe { attrs, .. })
            | Expr::While(ExprWhile { attrs, .. }) => prefix_attrs(attrs),

            // Without "full" the attrs fields are not inspected.
            #[cfg(not(feature = "full"))]
            Expr::Array(_)
            | Expr::Async(_)
            | Expr::Await(_)
            | Expr::Block(_)
            | Expr::Call(_)
            | Expr::Const(_)
            | Expr::Continue(_)
            | Expr::Field(_)
            | Expr::ForLoop(_)
            | Expr::Group(_)
            | Expr::If(_)
            | Expr::Index(_)
            | Expr::Infer(_)
            | Expr::Lit(_)
            | Expr::Loop(_)
            | Expr::Macro(_)
            | Expr::Match(_)
            | Expr::MethodCall(_)
            | Expr::Paren(_)
            | Expr::Path(_)
            | Expr::Repeat(_)
            | Expr::Struct(_)
            | Expr::Try(_)
            | Expr::TryBlock(_)
            | Expr::Tuple(_)
            | Expr::Unsafe(_)
            | Expr::While(_) => Precedence::Unambiguous,

            Expr::Verbatim(_) => Precedence::Unambiguous,

            #[cfg(not(feature = "full"))]
            Expr::Break(_) | Expr::Closure(_) | Expr::Return(_) | Expr::Yield(_) => unreachable!(),
        }
    }
}
|
||||
|
||||
impl Copy for Precedence {}

// NOTE(review): Copy/Clone/PartialEq/PartialOrd are written out manually
// rather than derived — presumably matching this crate's house style;
// confirm against upstream before changing.
impl Clone for Precedence {
    fn clone(&self) -> Self {
        *self
    }
}

// Equality by discriminant value.
impl PartialEq for Precedence {
    fn eq(&self, other: &Self) -> bool {
        *self as u8 == *other as u8
    }
}

// Ordering follows the declaration order of the variants (discriminants):
// earlier variants (e.g. `Jump`) compare less than later ones.
impl PartialOrd for Precedence {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        let this = *self as u8;
        let other = *other as u8;
        Some(this.cmp(&other))
    }
}
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
|
||||
// Wrapper that prints the contained token if present, or the type's
// `Default` token otherwise. Used when printing syntax that requires a
// token the parsed tree may have omitted (e.g. a turbofish `::`).
pub(crate) struct TokensOrDefault<'a, T: 'a>(pub &'a Option<T>);

impl<'a, T> ToTokens for TokensOrDefault<'a, T>
where
    T: ToTokens + Default,
{
    fn to_tokens(&self, tokens: &mut TokenStream) {
        match self.0 {
            Some(t) => t.to_tokens(tokens),
            // Fabricate a default token when none was recorded.
            None => T::default().to_tokens(tokens),
        }
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,180 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::path::Path;
|
||||
use crate::token;
|
||||
|
||||
ast_enum! {
    /// The visibility level of an item: inherited or `pub` or
    /// `pub(restricted)`.
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: crate::expr::Expr#syntax-tree-enums
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum Visibility {
        /// A public visibility level: `pub`.
        Public(Token![pub]),

        /// A visibility level restricted to some path: `pub(self)` or
        /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
        Restricted(VisRestricted),

        /// An inherited visibility, which usually means private.
        Inherited,
    }
}
|
||||
|
||||
ast_struct! {
    /// A visibility level restricted to some path: `pub(self)` or
    /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct VisRestricted {
        pub pub_token: Token![pub],
        pub paren_token: token::Paren,
        // Present only for the `pub(in path)` form.
        pub in_token: Option<Token![in]>,
        pub path: Box<Path>,
    }
}
|
||||
|
||||
ast_enum! {
    /// Unused, but reserved for RFC 3323 restrictions.
    #[cfg_attr(docsrs, doc(cfg(any(feature = "full", feature = "derive"))))]
    #[non_exhaustive]
    pub enum FieldMutability {
        None,

        // TODO: https://rust-lang.github.io/rfcs/3323-restrictions.html
        //
        // FieldMutability::Restricted(MutRestricted)
        //
        // pub struct MutRestricted {
        //     pub mut_token: Token![mut],
        //     pub paren_token: token::Paren,
        //     pub in_token: Option<Token![in]>,
        //     pub path: Box<Path>,
        // }
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use crate::error::Result;
    use crate::ext::IdentExt as _;
    use crate::ident::Ident;
    use crate::parse::discouraged::Speculative as _;
    use crate::parse::{Parse, ParseStream};
    use crate::path::Path;
    use crate::restriction::{VisRestricted, Visibility};
    use crate::token;

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for Visibility {
        fn parse(input: ParseStream) -> Result<Self> {
            // Recognize an empty None-delimited group, as produced by a $:vis
            // matcher that matched no tokens.
            if input.peek(token::Group) {
                let ahead = input.fork();
                let group = crate::group::parse_group(&ahead)?;
                if group.content.is_empty() {
                    input.advance_to(&ahead);
                    return Ok(Visibility::Inherited);
                }
            }

            // No `pub` keyword means inherited (private) visibility.
            if input.peek(Token![pub]) {
                Self::parse_pub(input)
            } else {
                Ok(Visibility::Inherited)
            }
        }
    }

    impl Visibility {
        // Parse the tokens after having peeked `pub`: either plain `pub`
        // or a restricted form `pub(crate|self|super)` / `pub(in path)`.
        fn parse_pub(input: ParseStream) -> Result<Self> {
            let pub_token = input.parse::<Token![pub]>()?;

            if input.peek(token::Paren) {
                // Parse speculatively on a fork: only commit (advance_to)
                // once the parentheses are confirmed to be a restriction.
                let ahead = input.fork();

                let content;
                let paren_token = parenthesized!(content in ahead);
                if content.peek(Token![crate])
                    || content.peek(Token![self])
                    || content.peek(Token![super])
                {
                    let path = content.call(Ident::parse_any)?;

                    // Ensure there are no additional tokens within `content`.
                    // Without explicitly checking, we may misinterpret a tuple
                    // field as a restricted visibility, causing a parse error.
                    // e.g. `pub (crate::A, crate::B)` (Issue #720).
                    if content.is_empty() {
                        input.advance_to(&ahead);
                        return Ok(Visibility::Restricted(VisRestricted {
                            pub_token,
                            paren_token,
                            in_token: None,
                            path: Box::new(Path::from(path)),
                        }));
                    }
                } else if content.peek(Token![in]) {
                    let in_token: Token![in] = content.parse()?;
                    let path = content.call(Path::parse_mod_style)?;

                    input.advance_to(&ahead);
                    return Ok(Visibility::Restricted(VisRestricted {
                        pub_token,
                        paren_token,
                        in_token: Some(in_token),
                        path: Box::new(path),
                    }));
                }
            }

            // Fall through: the parentheses (if any) were not a restriction,
            // so this is plain `pub` and the fork is discarded.
            Ok(Visibility::Public(pub_token))
        }

        // True for any explicit visibility, false for inherited.
        #[cfg(feature = "full")]
        pub(crate) fn is_some(&self) -> bool {
            match self {
                Visibility::Inherited => false,
                _ => true,
            }
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use crate::path;
    use crate::path::printing::PathStyle;
    use crate::restriction::{VisRestricted, Visibility};
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Visibility {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            match self {
                Visibility::Public(pub_token) => pub_token.to_tokens(tokens),
                Visibility::Restricted(vis_restricted) => vis_restricted.to_tokens(tokens),
                // Inherited visibility prints nothing.
                Visibility::Inherited => {}
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for VisRestricted {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.pub_token.to_tokens(tokens);
            self.paren_token.surround(tokens, |tokens| {
                // TODO: If we have a path which is not "self" or "super" or
                // "crate", automatically add the "in" token.
                self.in_token.to_tokens(tokens);
                // Restricted paths are mod-style: no generic arguments.
                path::printing::print_path(tokens, &self.path, PathStyle::Mod);
            });
        }
    }
}
|
||||
|
|
@ -0,0 +1,267 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use self::{Action::*, Input::*};
|
||||
use proc_macro2::{Delimiter, Ident, Spacing, TokenTree};
|
||||
use syn::parse::{ParseStream, Result};
|
||||
use syn::{AngleBracketedGenericArguments, BinOp, Expr, ExprPath, Lifetime, Lit, Token, Type};
|
||||
|
||||
// One matcher in a scanner rule. Each variant describes a condition tried
// against the input stream by `scan_expr`; if it matches (consuming tokens
// where applicable), the rule's `Action` is taken.
enum Input {
    // Match and consume the given identifier exactly.
    Keyword(&'static str),
    // Match and consume a multi-character punctuation sequence
    // (joint-spaced, e.g. "..=").
    Punct(&'static str),
    // Consume any single token tree, if one is present.
    ConsumeAny,
    // Consume a binary operator (syn `BinOp`).
    ConsumeBinOp,
    // Consume a `{...}` group unconditionally.
    ConsumeBrace,
    // Consume any delimited group: `(...)`, `[...]` or `{...}`.
    ConsumeDelimiter,
    // Consume an identifier.
    ConsumeIdent,
    // Consume a lifetime.
    ConsumeLifetime,
    // Consume a literal.
    ConsumeLiteral,
    // Consume a `{...}` group, but only while nested (depth > 0).
    ConsumeNestedBrace,
    // Parse a full `ExprPath` (fails hard if absent).
    ExpectPath,
    // If `::` follows, parse turbofish generic arguments.
    ExpectTurbofish,
    // Parse a type (without `+` bounds).
    ExpectType,
    // Match (without consuming) if an expression can begin here.
    CanBeginExpr,
    // Always matches; consumes nothing.
    Otherwise,
    // Match end of input (or a `,` boundary).
    Empty,
}
|
||||
|
||||
// What to do after an `Input` matcher succeeds.
enum Action {
    // Continue scanning with the given rule table as the new state.
    SetState(&'static [(Input, Action)]),
    // Enter a nested construct (if/match/while/for-in): depth += 1.
    IncDepth,
    // Leave a nested construct: depth -= 1.
    DecDepth,
    // Scanning of the expression is complete (valid only at depth 0).
    Finish,
}
|
||||
|
||||
// State tables for the table-driven expression scanner (`scan_expr`).
// Each table is one scanner state: an ordered list of (matcher, action)
// rules tried top to bottom; the first matcher that succeeds decides the
// next state. `INIT` is the entry state, expecting the start of an
// expression.
static INIT: [(Input, Action); 28] = [
    (ConsumeDelimiter, SetState(&POSTFIX)),
    (Keyword("async"), SetState(&ASYNC)),
    (Keyword("break"), SetState(&BREAK_LABEL)),
    (Keyword("const"), SetState(&CONST)),
    (Keyword("continue"), SetState(&CONTINUE)),
    (Keyword("for"), SetState(&FOR)),
    (Keyword("if"), IncDepth),
    (Keyword("let"), SetState(&PATTERN)),
    (Keyword("loop"), SetState(&BLOCK)),
    (Keyword("match"), IncDepth),
    (Keyword("move"), SetState(&CLOSURE)),
    (Keyword("return"), SetState(&RETURN)),
    (Keyword("static"), SetState(&CLOSURE)),
    (Keyword("unsafe"), SetState(&BLOCK)),
    (Keyword("while"), IncDepth),
    (Keyword("yield"), SetState(&RETURN)),
    (Keyword("_"), SetState(&POSTFIX)),
    (Punct("!"), SetState(&INIT)),
    (Punct("#"), SetState(&[(ConsumeDelimiter, SetState(&INIT))])),
    (Punct("&"), SetState(&REFERENCE)),
    (Punct("*"), SetState(&INIT)),
    (Punct("-"), SetState(&INIT)),
    (Punct("..="), SetState(&INIT)),
    (Punct(".."), SetState(&RANGE)),
    (Punct("|"), SetState(&CLOSURE_ARGS)),
    (ConsumeLifetime, SetState(&[(Punct(":"), SetState(&INIT))])),
    (ConsumeLiteral, SetState(&POSTFIX)),
    (ExpectPath, SetState(&PATH)),
];

// After a complete operand: postfix operators, binary operators, or end.
static POSTFIX: [(Input, Action); 10] = [
    (Keyword("as"), SetState(&[(ExpectType, SetState(&POSTFIX))])),
    (Punct("..="), SetState(&INIT)),
    (Punct(".."), SetState(&RANGE)),
    (Punct("."), SetState(&DOT)),
    (Punct("?"), SetState(&POSTFIX)),
    (ConsumeBinOp, SetState(&INIT)),
    (Punct("="), SetState(&INIT)),
    (ConsumeNestedBrace, SetState(&IF_THEN)),
    (ConsumeDelimiter, SetState(&POSTFIX)),
    (Empty, Finish),
];

// After the `async` keyword: async block or async (move) closure.
static ASYNC: [(Input, Action); 3] = [
    (Keyword("move"), SetState(&ASYNC)),
    (Punct("|"), SetState(&CLOSURE_ARGS)),
    (ConsumeBrace, SetState(&POSTFIX)),
];

// Expecting a block body, e.g. after `loop` or `unsafe`.
static BLOCK: [(Input, Action); 1] = [(ConsumeBrace, SetState(&POSTFIX))];

// After `break`: an optional loop label, then an optional value.
static BREAK_LABEL: [(Input, Action); 2] = [
    (ConsumeLifetime, SetState(&BREAK_VALUE)),
    (Otherwise, SetState(&BREAK_VALUE)),
];

// After `break ['label]`: an optional break value expression.
static BREAK_VALUE: [(Input, Action); 3] = [
    (ConsumeNestedBrace, SetState(&IF_THEN)),
    (CanBeginExpr, SetState(&INIT)),
    (Otherwise, SetState(&POSTFIX)),
];

// Closure prefix tokens (async/move/generic lifetimes) before the `|`.
static CLOSURE: [(Input, Action); 7] = [
    (Keyword("async"), SetState(&CLOSURE)),
    (Keyword("move"), SetState(&CLOSURE)),
    (Punct(","), SetState(&CLOSURE)),
    (Punct(">"), SetState(&CLOSURE)),
    (Punct("|"), SetState(&CLOSURE_ARGS)),
    (ConsumeLifetime, SetState(&CLOSURE)),
    (ConsumeIdent, SetState(&CLOSURE)),
];

// Inside `|...|`: consume anything until the closing `|`.
static CLOSURE_ARGS: [(Input, Action); 2] = [
    (Punct("|"), SetState(&CLOSURE_RET)),
    (ConsumeAny, SetState(&CLOSURE_ARGS)),
];

// After closure arguments: optional `-> Type` then the body.
static CLOSURE_RET: [(Input, Action); 2] = [
    (Punct("->"), SetState(&[(ExpectType, SetState(&BLOCK))])),
    (Otherwise, SetState(&INIT)),
];

// After `const`: const block or const closure.
static CONST: [(Input, Action); 2] = [
    (Punct("|"), SetState(&CLOSURE_ARGS)),
    (ConsumeBrace, SetState(&POSTFIX)),
];

// After `continue`: optional loop label.
static CONTINUE: [(Input, Action); 2] = [
    (ConsumeLifetime, SetState(&POSTFIX)),
    (Otherwise, SetState(&POSTFIX)),
];

// After `.`: await, method/field name, or a tuple index literal.
static DOT: [(Input, Action); 3] = [
    (Keyword("await"), SetState(&POSTFIX)),
    (ConsumeIdent, SetState(&METHOD)),
    (ConsumeLiteral, SetState(&POSTFIX)),
];

// After `for`: either `for<'a>` (higher-ranked closure syntax) or a
// loop pattern.
static FOR: [(Input, Action); 2] = [
    (Punct("<"), SetState(&CLOSURE)),
    (Otherwise, SetState(&PATTERN)),
];

// `else` branch: either `else if ...` or a final block.
static IF_ELSE: [(Input, Action); 2] = [(Keyword("if"), SetState(&INIT)), (ConsumeBrace, DecDepth)];
// After an if/loop body brace: optional `else`, otherwise leave nesting.
static IF_THEN: [(Input, Action); 2] =
    [(Keyword("else"), SetState(&IF_ELSE)), (Otherwise, DecDepth)];

// After `.ident`: optional turbofish before call arguments.
static METHOD: [(Input, Action); 1] = [(ExpectTurbofish, SetState(&POSTFIX))];

// After a path: macro invocation `!`, struct literal brace, or postfix.
static PATH: [(Input, Action); 4] = [
    (Punct("!="), SetState(&INIT)),
    (Punct("!"), SetState(&INIT)),
    (ConsumeNestedBrace, SetState(&IF_THEN)),
    (Otherwise, SetState(&POSTFIX)),
];

// Pattern position (after `let`, or a `for` loop binding).
static PATTERN: [(Input, Action); 15] = [
    (ConsumeDelimiter, SetState(&PATTERN)),
    (Keyword("box"), SetState(&PATTERN)),
    (Keyword("in"), IncDepth),
    (Keyword("mut"), SetState(&PATTERN)),
    (Keyword("ref"), SetState(&PATTERN)),
    (Keyword("_"), SetState(&PATTERN)),
    (Punct("!"), SetState(&PATTERN)),
    (Punct("&"), SetState(&PATTERN)),
    (Punct("..="), SetState(&PATTERN)),
    (Punct(".."), SetState(&PATTERN)),
    (Punct("="), SetState(&INIT)),
    (Punct("@"), SetState(&PATTERN)),
    (Punct("|"), SetState(&PATTERN)),
    (ConsumeLiteral, SetState(&PATTERN)),
    (ExpectPath, SetState(&PATTERN)),
];

// After `..`: an optional upper bound, or end of the expression.
static RANGE: [(Input, Action); 6] = [
    (Punct("..="), SetState(&INIT)),
    (Punct(".."), SetState(&RANGE)),
    (Punct("."), SetState(&DOT)),
    (ConsumeNestedBrace, SetState(&IF_THEN)),
    (Empty, Finish),
    (Otherwise, SetState(&INIT)),
];

// After `&raw`: must be `const` or `mut` for a raw-pointer borrow;
// otherwise `raw` was an ordinary path expression.
static RAW: [(Input, Action); 3] = [
    (Keyword("const"), SetState(&INIT)),
    (Keyword("mut"), SetState(&INIT)),
    (Otherwise, SetState(&POSTFIX)),
];

// After `&`: optional `mut` or `raw` qualifier, then an operand.
static REFERENCE: [(Input, Action); 3] = [
    (Keyword("mut"), SetState(&INIT)),
    (Keyword("raw"), SetState(&RAW)),
    (Otherwise, SetState(&INIT)),
];

// After `return`/`yield`: an optional value expression.
static RETURN: [(Input, Action); 2] = [
    (CanBeginExpr, SetState(&INIT)),
    (Otherwise, SetState(&POSTFIX)),
];
|
||||
|
||||
// Drive the table-based scanner over `input`: starting in `INIT`, repeatedly
// apply the first matching rule of the current state, switching states per
// the rule's `Action`, until a `Finish` rule fires (Ok) or no rule matches
// (error). `depth` counts nesting opened by rules like `Keyword("in") =>
// IncDepth`; `ConsumeNestedBrace` only consumes a brace group at depth > 0.
pub(crate) fn scan_expr(input: ParseStream) -> Result<()> {
    let mut state = INIT.as_slice();
    let mut depth = 0usize;
    'table: loop {
        for rule in state {
            if match rule.0 {
                Input::Keyword(expected) => input.step(|cursor| match cursor.ident() {
                    Some((ident, rest)) if ident == expected => Ok((true, rest)),
                    _ => Ok((false, *cursor)),
                })?,
                // Match a possibly multi-character punct: every character but
                // the last must be joined (`Spacing::Joint`) to the next one.
                Input::Punct(expected) => input.step(|cursor| {
                    let begin = *cursor;
                    let mut cursor = begin;
                    for (i, ch) in expected.chars().enumerate() {
                        match cursor.punct() {
                            Some((punct, _)) if punct.as_char() != ch => break,
                            Some((_, rest)) if i == expected.len() - 1 => {
                                return Ok((true, rest));
                            }
                            Some((punct, rest)) if punct.spacing() == Spacing::Joint => {
                                cursor = rest;
                            }
                            _ => break,
                        }
                    }
                    // No match: leave the cursor where it started.
                    Ok((false, begin))
                })?,
                Input::ConsumeAny => input.parse::<Option<TokenTree>>()?.is_some(),
                Input::ConsumeBinOp => input.parse::<BinOp>().is_ok(),
                Input::ConsumeBrace | Input::ConsumeNestedBrace => {
                    // ConsumeNestedBrace only applies while inside a
                    // depth-increasing construct (depth > 0).
                    (matches!(rule.0, Input::ConsumeBrace) || depth > 0)
                        && input.step(|cursor| match cursor.group(Delimiter::Brace) {
                            Some((_inside, _span, rest)) => Ok((true, rest)),
                            None => Ok((false, *cursor)),
                        })?
                }
                Input::ConsumeDelimiter => input.step(|cursor| match cursor.any_group() {
                    Some((_inside, _delimiter, _span, rest)) => Ok((true, rest)),
                    None => Ok((false, *cursor)),
                })?,
                Input::ConsumeIdent => input.parse::<Option<Ident>>()?.is_some(),
                Input::ConsumeLifetime => input.parse::<Option<Lifetime>>()?.is_some(),
                Input::ConsumeLiteral => input.parse::<Option<Lit>>()?.is_some(),
                Input::ExpectPath => {
                    input.parse::<ExprPath>()?;
                    true
                }
                Input::ExpectTurbofish => {
                    if input.peek(Token![::]) {
                        input.parse::<AngleBracketedGenericArguments>()?;
                    }
                    true
                }
                Input::ExpectType => {
                    Type::without_plus(input)?;
                    true
                }
                Input::CanBeginExpr => Expr::peek(input),
                Input::Otherwise => true,
                Input::Empty => input.is_empty() || input.peek(Token![,]),
            } {
                state = match rule.1 {
                    Action::SetState(next) => next,
                    // Tuple trick: perform the side effect, then yield the
                    // next state as the match arm's value.
                    Action::IncDepth => (depth += 1, &INIT).1,
                    Action::DecDepth => (depth -= 1, &POSTFIX).1,
                    // Finishing at nonzero depth is an error: `break` exits
                    // the `for`, falling through to the Err below.
                    Action::Finish => return if depth == 0 { Ok(()) } else { break },
                };
                continue 'table;
            }
        }
        return Err(input.error("unsupported expression"));
    }
}
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod lookahead {
    // Sealing supertrait: traits bounded on `lookahead::Sealed` can only be
    // implemented inside this crate. Implementors must also be `Copy`.
    pub trait Sealed: Copy {}
}
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::{Delimiter, Group, Span, TokenStream};
|
||||
|
||||
// Conversion used by span-taking constructors: a caller may supply a single
// `Span` wherever 1..=3 spans (or a `DelimSpan`) are expected, and it is
// repeated as needed; span arrays and `DelimSpan` pass through unchanged.
#[doc(hidden)]
pub trait IntoSpans<S> {
    fn into_spans(self) -> S;
}

impl IntoSpans<Span> for Span {
    fn into_spans(self) -> Span {
        self
    }
}

impl IntoSpans<[Span; 1]> for Span {
    fn into_spans(self) -> [Span; 1] {
        [self]
    }
}

impl IntoSpans<[Span; 2]> for Span {
    // One span repeated for a two-character token.
    fn into_spans(self) -> [Span; 2] {
        [self, self]
    }
}

impl IntoSpans<[Span; 3]> for Span {
    // One span repeated for a three-character token.
    fn into_spans(self) -> [Span; 3] {
        [self, self, self]
    }
}

impl IntoSpans<[Span; 1]> for [Span; 1] {
    fn into_spans(self) -> [Span; 1] {
        self
    }
}

impl IntoSpans<[Span; 2]> for [Span; 2] {
    fn into_spans(self) -> [Span; 2] {
        self
    }
}

impl IntoSpans<[Span; 3]> for [Span; 3] {
    fn into_spans(self) -> [Span; 3] {
        self
    }
}

impl IntoSpans<DelimSpan> for Span {
    // Obtain a `DelimSpan` from a plain `Span` by wrapping the span in an
    // empty `Delimiter::None` group and taking that group's delim span.
    fn into_spans(self) -> DelimSpan {
        let mut group = Group::new(Delimiter::None, TokenStream::new());
        group.set_span(self);
        group.delim_span()
    }
}

impl IntoSpans<DelimSpan> for DelimSpan {
    fn into_spans(self) -> DelimSpan {
        self
    }
}
|
||||
|
|
@ -0,0 +1,120 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
//! A trait that can provide the `Span` of the complete contents of a syntax
|
||||
//! tree node.
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
//! Suppose in a procedural macro we have a [`Type`] that we want to assert
|
||||
//! implements the [`Sync`] trait. Maybe this is the type of one of the fields
|
||||
//! of a struct for which we are deriving a trait implementation, and we need to
|
||||
//! be able to pass a reference to one of those fields across threads.
|
||||
//!
|
||||
//! [`Type`]: crate::Type
|
||||
//! [`Sync`]: std::marker::Sync
|
||||
//!
|
||||
//! If the field type does *not* implement `Sync` as required, we want the
|
||||
//! compiler to report an error pointing out exactly which type it was.
|
||||
//!
|
||||
//! The following macro code takes a variable `ty` of type `Type` and produces a
|
||||
//! static assertion that `Sync` is implemented for that type.
|
||||
//!
|
||||
//! ```
|
||||
//! # extern crate proc_macro;
|
||||
//! #
|
||||
//! use proc_macro::TokenStream;
|
||||
//! use proc_macro2::Span;
|
||||
//! use quote::quote_spanned;
|
||||
//! use syn::Type;
|
||||
//! use syn::spanned::Spanned;
|
||||
//!
|
||||
//! # const IGNORE_TOKENS: &str = stringify! {
|
||||
//! #[proc_macro_derive(MyMacro)]
|
||||
//! # };
|
||||
//! pub fn my_macro(input: TokenStream) -> TokenStream {
|
||||
//! # let ty = get_a_type();
|
||||
//! /* ... */
|
||||
//!
|
||||
//! let assert_sync = quote_spanned! {ty.span()=>
|
||||
//! struct _AssertSync where #ty: Sync;
|
||||
//! };
|
||||
//!
|
||||
//! /* ... */
|
||||
//! # input
|
||||
//! }
|
||||
//! #
|
||||
//! # fn get_a_type() -> Type {
|
||||
//! # unimplemented!()
|
||||
//! # }
|
||||
//! ```
|
||||
//!
|
||||
//! By inserting this `assert_sync` fragment into the output code generated by
|
||||
//! our macro, the user's code will fail to compile if `ty` does not implement
|
||||
//! `Sync`. The errors they would see look like the following.
|
||||
//!
|
||||
//! ```text
|
||||
//! error[E0277]: the trait bound `*const i32: std::marker::Sync` is not satisfied
|
||||
//! --> src/main.rs:10:21
|
||||
//! |
|
||||
//! 10 | bad_field: *const i32,
|
||||
//! | ^^^^^^^^^^ `*const i32` cannot be shared between threads safely
|
||||
//! ```
|
||||
//!
|
||||
//! In this technique, using the `Type`'s span for the error message makes the
|
||||
//! error appear in the correct place underlining the right type.
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Limitations
|
||||
//!
|
||||
//! The underlying [`proc_macro::Span::join`] method is nightly-only. When
|
||||
//! called from within a procedural macro in a nightly compiler, `Spanned` will
|
||||
//! use `join` to produce the intended span. When not using a nightly compiler,
|
||||
//! only the span of the *first token* of the syntax tree node is returned.
|
||||
//!
|
||||
//! In the common case of wanting to use the joined span as the span of a
|
||||
//! `syn::Error`, consider instead using [`syn::Error::new_spanned`] which is
|
||||
//! able to span the error correctly under the complete syntax tree node without
|
||||
//! needing the unstable `join`.
|
||||
//!
|
||||
//! [`syn::Error::new_spanned`]: crate::Error::new_spanned
|
||||
|
||||
use proc_macro2::Span;
|
||||
use quote::spanned::Spanned as ToTokens;
|
||||
|
||||
/// A trait that can provide the `Span` of the complete contents of a syntax
/// tree node.
///
/// This trait is automatically implemented for all types that implement
/// [`ToTokens`] from the `quote` crate, as well as for `Span` itself.
///
/// [`ToTokens`]: quote::ToTokens
///
/// See the [module documentation] for an example.
///
/// [module documentation]: self
pub trait Spanned: private::Sealed {
    /// Returns a `Span` covering the complete contents of this syntax tree
    /// node, or [`Span::call_site()`] if this node is empty.
    ///
    /// [`Span::call_site()`]: proc_macro2::Span::call_site
    fn span(&self) -> Span;
}

// Blanket impl: `__span()` comes from quote's internal `Spanned` trait,
// imported above under the alias `ToTokens`.
impl<T: ?Sized + ToTokens> Spanned for T {
    fn span(&self) -> Span {
        self.__span()
    }
}

mod private {
    use crate::spanned::ToTokens;

    // Seals `Spanned` (via the supertrait bound) so downstream crates
    // cannot add their own impls beyond the blanket one below.
    pub trait Sealed {}
    impl<T: ?Sized + ToTokens> Sealed for T {}

    // QSelf does not implement ToTokens, so it needs its own Sealed impl.
    #[cfg(any(feature = "full", feature = "derive"))]
    impl Sealed for crate::QSelf {}
}
|
||||
|
|
@ -0,0 +1,486 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::attr::Attribute;
|
||||
use crate::expr::Expr;
|
||||
use crate::item::Item;
|
||||
use crate::mac::Macro;
|
||||
use crate::pat::Pat;
|
||||
use crate::token;
|
||||
|
||||
ast_struct! {
    /// A braced block containing Rust statements.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct Block {
        // The surrounding `{` ... `}`.
        pub brace_token: token::Brace,
        /// Statements in a block
        pub stmts: Vec<Stmt>,
    }
}
|
||||
|
||||
ast_enum! {
    /// A statement, usually ending in a semicolon.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub enum Stmt {
        /// A local (let) binding.
        Local(Local),

        /// An item definition.
        Item(Item),

        /// Expression, with or without trailing semicolon.
        // The Option is Some when the expression is followed by `;`.
        Expr(Expr, Option<Token![;]>),

        /// A macro invocation in statement position.
        ///
        /// Syntactically it's ambiguous which other kind of statement this
        /// macro would expand to. It can be any of local variable (`let`),
        /// item, or expression.
        Macro(StmtMacro),
    }
}
|
||||
|
||||
ast_struct! {
    /// A local `let` binding: `let x: u64 = s.parse()?;`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct Local {
        pub attrs: Vec<Attribute>,
        pub let_token: Token![let],
        // Binding pattern; a type ascription is folded into the pattern
        // as `Pat::Type`.
        pub pat: Pat,
        // The `= expr` (and optional let-else divergence), if any.
        pub init: Option<LocalInit>,
        pub semi_token: Token![;],
    }
}
|
||||
|
||||
ast_struct! {
    /// The expression assigned in a local `let` binding, including optional
    /// diverging `else` block.
    ///
    /// `LocalInit` represents `= s.parse()?` in `let x: u64 = s.parse()?` and
    /// `= r else { return }` in `let Ok(x) = r else { return }`.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct LocalInit {
        pub eq_token: Token![=],
        pub expr: Box<Expr>,
        // The `else` keyword and its block, for let-else bindings.
        pub diverge: Option<(Token![else], Box<Expr>)>,
    }
}
|
||||
|
||||
ast_struct! {
    /// A macro invocation in statement position.
    ///
    /// Syntactically it's ambiguous which other kind of statement this macro
    /// would expand to. It can be any of local variable (`let`), item, or
    /// expression.
    #[cfg_attr(docsrs, doc(cfg(feature = "full")))]
    pub struct StmtMacro {
        pub attrs: Vec<Attribute>,
        pub mac: Macro,
        // Trailing `;`, if present.
        pub semi_token: Option<Token![;]>,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
    use crate::attr::Attribute;
    use crate::classify;
    use crate::error::Result;
    use crate::expr::{Expr, ExprBlock, ExprMacro};
    use crate::ident::Ident;
    use crate::item;
    use crate::mac::{self, Macro};
    use crate::parse::discouraged::Speculative as _;
    use crate::parse::{Parse, ParseStream};
    use crate::pat::{Pat, PatType};
    use crate::path::Path;
    use crate::stmt::{Block, Local, LocalInit, Stmt, StmtMacro};
    use crate::token;
    use crate::ty::Type;
    use proc_macro2::TokenStream;

    // Whether a trailing expression without `;` is acceptable (true when
    // parsing the statements of a block, false for a standalone Stmt).
    struct AllowNoSemi(bool);

    impl Block {
        /// Parse the body of a block as zero or more statements, possibly
        /// including one trailing expression.
        ///
        /// # Example
        ///
        /// ```
        /// use syn::{braced, token, Attribute, Block, Ident, Result, Stmt, Token};
        /// use syn::parse::{Parse, ParseStream};
        ///
        /// // Parse a function with no generics or parameter list.
        /// //
        /// //     fn playground {
        /// //         let mut x = 1;
        /// //         x += 1;
        /// //         println!("{}", x);
        /// //     }
        /// struct MiniFunction {
        ///     attrs: Vec<Attribute>,
        ///     fn_token: Token![fn],
        ///     name: Ident,
        ///     brace_token: token::Brace,
        ///     stmts: Vec<Stmt>,
        /// }
        ///
        /// impl Parse for MiniFunction {
        ///     fn parse(input: ParseStream) -> Result<Self> {
        ///         let outer_attrs = input.call(Attribute::parse_outer)?;
        ///         let fn_token: Token![fn] = input.parse()?;
        ///         let name: Ident = input.parse()?;
        ///
        ///         let content;
        ///         let brace_token = braced!(content in input);
        ///         let inner_attrs = content.call(Attribute::parse_inner)?;
        ///         let stmts = content.call(Block::parse_within)?;
        ///
        ///         Ok(MiniFunction {
        ///             attrs: {
        ///                 let mut attrs = outer_attrs;
        ///                 attrs.extend(inner_attrs);
        ///                 attrs
        ///             },
        ///             fn_token,
        ///             name,
        ///             brace_token,
        ///             stmts,
        ///         })
        ///     }
        /// }
        /// ```
        #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
        pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
            let mut stmts = Vec::new();
            loop {
                // Stray semicolons become empty verbatim expression
                // statements so they round-trip through printing.
                while let semi @ Some(_) = input.parse()? {
                    stmts.push(Stmt::Expr(Expr::Verbatim(TokenStream::new()), semi));
                }
                if input.is_empty() {
                    break;
                }
                let stmt = parse_stmt(input, AllowNoSemi(true))?;
                let requires_semicolon = match &stmt {
                    Stmt::Expr(stmt, None) => classify::requires_semi_to_be_stmt(stmt),
                    Stmt::Macro(stmt) => {
                        stmt.semi_token.is_none() && !stmt.mac.delimiter.is_brace()
                    }
                    Stmt::Local(_) | Stmt::Item(_) | Stmt::Expr(_, Some(_)) => false,
                };
                stmts.push(stmt);
                if input.is_empty() {
                    // A trailing expression without `;` is allowed only as
                    // the last statement of the block.
                    break;
                } else if requires_semicolon {
                    return Err(input.error("unexpected token, expected `;`"));
                }
            }
            Ok(stmts)
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for Block {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(Block {
                brace_token: braced!(content in input),
                stmts: content.call(Block::parse_within)?,
            })
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
    impl Parse for Stmt {
        fn parse(input: ParseStream) -> Result<Self> {
            let allow_nosemi = AllowNoSemi(false);
            parse_stmt(input, allow_nosemi)
        }
    }

    // Dispatch on lookahead: macro statement, `let` binding, item, or
    // expression statement. The order and exact peeks below are semantic;
    // do not reorder.
    fn parse_stmt(input: ParseStream, allow_nosemi: AllowNoSemi) -> Result<Stmt> {
        let begin = input.fork();
        let attrs = input.call(Attribute::parse_outer)?;

        // brace-style macros; paren and bracket macros get parsed as
        // expression statements.
        let ahead = input.fork();
        let mut is_item_macro = false;
        if let Ok(path) = ahead.call(Path::parse_mod_style) {
            if ahead.peek(Token![!]) {
                if ahead.peek2(Ident) || ahead.peek2(Token![try]) {
                    // `path! ident ...` is a macro-defined item.
                    is_item_macro = true;
                } else if ahead.peek2(token::Brace)
                    && !(ahead.peek3(Token![.]) && !ahead.peek3(Token![..])
                        || ahead.peek3(Token![?]))
                {
                    input.advance_to(&ahead);
                    return stmt_mac(input, attrs, path).map(Stmt::Macro);
                }
            }
        }

        if input.peek(Token![let]) && !input.peek(token::Group) {
            stmt_local(input, attrs).map(Stmt::Local)
        } else if input.peek(Token![pub])
            || input.peek(Token![crate]) && !input.peek2(Token![::])
            || input.peek(Token![extern])
            || input.peek(Token![use])
            || input.peek(Token![static])
                && (input.peek2(Token![mut])
                    || input.peek2(Ident)
                        && !(input.peek2(Token![async])
                            && (input.peek3(Token![move]) || input.peek3(Token![|]))))
            || input.peek(Token![const])
                && !(input.peek2(token::Brace)
                    || input.peek2(Token![static])
                    || input.peek2(Token![async])
                        && !(input.peek3(Token![unsafe])
                            || input.peek3(Token![extern])
                            || input.peek3(Token![fn]))
                    || input.peek2(Token![move])
                    || input.peek2(Token![|]))
            || input.peek(Token![unsafe]) && !input.peek2(token::Brace)
            || input.peek(Token![async])
                && (input.peek2(Token![unsafe])
                    || input.peek2(Token![extern])
                    || input.peek2(Token![fn]))
            || input.peek(Token![fn])
            || input.peek(Token![mod])
            || input.peek(Token![type])
            || input.peek(Token![struct])
            || input.peek(Token![enum])
            || input.peek(Token![union]) && input.peek2(Ident)
            || input.peek(Token![auto]) && input.peek2(Token![trait])
            || input.peek(Token![trait])
            || input.peek(Token![default])
                && (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
            || input.peek(Token![impl])
            || input.peek(Token![macro])
            || is_item_macro
        {
            // Re-parse from `begin` so the item parser sees the attributes.
            let item = item::parsing::parse_rest_of_item(begin, attrs, input)?;
            Ok(Stmt::Item(item))
        } else {
            stmt_expr(input, allow_nosemi, attrs)
        }
    }

    // `path` has already been consumed; parse `! ( ... )` etc. plus an
    // optional trailing semicolon.
    fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<StmtMacro> {
        let bang_token: Token![!] = input.parse()?;
        let (delimiter, tokens) = mac::parse_delimiter(input)?;
        let semi_token: Option<Token![;]> = input.parse()?;

        Ok(StmtMacro {
            attrs,
            mac: Macro {
                path,
                bang_token,
                delimiter,
                tokens,
            },
            semi_token,
        })
    }

    fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
        let let_token: Token![let] = input.parse()?;

        let mut pat = Pat::parse_single(input)?;
        if input.peek(Token![:]) {
            // A type ascription wraps the pattern in `Pat::Type`.
            let colon_token: Token![:] = input.parse()?;
            let ty: Type = input.parse()?;
            pat = Pat::Type(PatType {
                attrs: Vec::new(),
                pat: Box::new(pat),
                colon_token,
                ty: Box::new(ty),
            });
        }

        let init = if let Some(eq_token) = input.parse()? {
            let eq_token: Token![=] = eq_token;
            let expr: Expr = input.parse()?;

            // let-else: an `else` following a brace-ended initializer would
            // belong to the initializer itself, so it is excluded here.
            let diverge = if !classify::expr_trailing_brace(&expr) && input.peek(Token![else]) {
                let else_token: Token![else] = input.parse()?;
                let diverge = ExprBlock {
                    attrs: Vec::new(),
                    label: None,
                    block: input.parse()?,
                };
                Some((else_token, Box::new(Expr::Block(diverge))))
            } else {
                None
            };

            Some(LocalInit {
                eq_token,
                expr: Box::new(expr),
                diverge,
            })
        } else {
            None
        };

        let semi_token: Token![;] = input.parse()?;

        Ok(Local {
            attrs,
            let_token,
            pat,
            init,
            semi_token,
        })
    }

    fn stmt_expr(
        input: ParseStream,
        allow_nosemi: AllowNoSemi,
        mut attrs: Vec<Attribute>,
    ) -> Result<Stmt> {
        let mut e = Expr::parse_with_earlier_boundary_rule(input)?;

        // Walk down to the leftmost subexpression: outer attributes parsed
        // above belong on it, not on the outermost node.
        let mut attr_target = &mut e;
        loop {
            attr_target = match attr_target {
                Expr::Assign(e) => &mut e.left,
                Expr::Binary(e) => &mut e.left,
                Expr::Cast(e) => &mut e.expr,
                Expr::Array(_)
                | Expr::Async(_)
                | Expr::Await(_)
                | Expr::Block(_)
                | Expr::Break(_)
                | Expr::Call(_)
                | Expr::Closure(_)
                | Expr::Const(_)
                | Expr::Continue(_)
                | Expr::Field(_)
                | Expr::ForLoop(_)
                | Expr::Group(_)
                | Expr::If(_)
                | Expr::Index(_)
                | Expr::Infer(_)
                | Expr::Let(_)
                | Expr::Lit(_)
                | Expr::Loop(_)
                | Expr::Macro(_)
                | Expr::Match(_)
                | Expr::MethodCall(_)
                | Expr::Paren(_)
                | Expr::Path(_)
                | Expr::Range(_)
                | Expr::RawAddr(_)
                | Expr::Reference(_)
                | Expr::Repeat(_)
                | Expr::Return(_)
                | Expr::Struct(_)
                | Expr::Try(_)
                | Expr::TryBlock(_)
                | Expr::Tuple(_)
                | Expr::Unary(_)
                | Expr::Unsafe(_)
                | Expr::While(_)
                | Expr::Yield(_)
                | Expr::Verbatim(_) => break,
            };
        }
        attrs.extend(attr_target.replace_attrs(Vec::new()));
        attr_target.replace_attrs(attrs);

        let semi_token: Option<Token![;]> = input.parse()?;

        // A bare macro expression statement is represented as Stmt::Macro.
        match e {
            Expr::Macro(ExprMacro { attrs, mac })
                if semi_token.is_some() || mac.delimiter.is_brace() =>
            {
                return Ok(Stmt::Macro(StmtMacro {
                    attrs,
                    mac,
                    semi_token,
                }));
            }
            _ => {}
        }

        if semi_token.is_some() {
            Ok(Stmt::Expr(e, semi_token))
        } else if allow_nosemi.0 || !classify::requires_semi_to_be_stmt(&e) {
            Ok(Stmt::Expr(e, None))
        } else {
            Err(input.error("expected semicolon"))
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
pub(crate) mod printing {
    use crate::classify;
    use crate::expr::{self, Expr};
    use crate::fixup::FixupContext;
    use crate::stmt::{Block, Local, Stmt, StmtMacro};
    use crate::token;
    use proc_macro2::TokenStream;
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Block {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.brace_token.surround(tokens, |tokens| {
                tokens.append_all(&self.stmts);
            });
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Stmt {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            match self {
                Stmt::Local(local) => local.to_tokens(tokens),
                Stmt::Item(item) => item.to_tokens(tokens),
                Stmt::Expr(expr, semi) => {
                    // Expressions in statement position go through the
                    // statement fixup context.
                    expr::printing::print_expr(expr, tokens, FixupContext::new_stmt());
                    semi.to_tokens(tokens);
                }
                Stmt::Macro(mac) => mac.to_tokens(tokens),
            }
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for Local {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
            self.let_token.to_tokens(tokens);
            self.pat.to_tokens(tokens);
            if let Some(init) = &self.init {
                init.eq_token.to_tokens(tokens);
                // NOTE(review): the bool appears to request guarding the
                // initializer when it ends in a brace and an `else` follows
                // (so the `else` is not attached to the initializer) —
                // confirm against print_subexpression.
                expr::printing::print_subexpression(
                    &init.expr,
                    init.diverge.is_some() && classify::expr_trailing_brace(&init.expr),
                    tokens,
                    FixupContext::NONE,
                );
                if let Some((else_token, diverge)) = &init.diverge {
                    else_token.to_tokens(tokens);
                    // The diverging arm always prints as a braced block.
                    match &**diverge {
                        Expr::Block(diverge) => diverge.to_tokens(tokens),
                        _ => token::Brace::default().surround(tokens, |tokens| {
                            expr::printing::print_expr(diverge, tokens, FixupContext::new_stmt());
                        }),
                    }
                }
            }
            self.semi_token.to_tokens(tokens);
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "printing")))]
    impl ToTokens for StmtMacro {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
            self.mac.to_tokens(tokens);
            self.semi_token.to_tokens(tokens);
        }
    }
}
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use std::fmt::{self, Debug};
|
||||
use std::thread::{self, ThreadId};
|
||||
|
||||
/// ThreadBound is a Sync-maker and Send-maker that allows accessing a value
/// of type T only from the original thread on which the ThreadBound was
/// constructed.
pub(crate) struct ThreadBound<T> {
    value: T,
    thread_id: ThreadId,
}

// Sound because `get` only hands out a reference on the owning thread.
unsafe impl<T> Sync for ThreadBound<T> {}

// Send bound requires Copy, as otherwise Drop could run in the wrong place.
//
// Today Copy and Drop are mutually exclusive so `T: Copy` implies `T: !Drop`.
// This impl needs to be revisited if that restriction is relaxed in the future.
unsafe impl<T: Copy> Send for ThreadBound<T> {}

impl<T> ThreadBound<T> {
    /// Bind `value` to the thread that is currently running.
    pub(crate) fn new(value: T) -> Self {
        let thread_id = thread::current().id();
        ThreadBound { value, thread_id }
    }

    /// Borrow the value, or `None` when called from any other thread.
    pub(crate) fn get(&self) -> Option<&T> {
        let on_owning_thread = thread::current().id() == self.thread_id;
        if on_owning_thread {
            Some(&self.value)
        } else {
            None
        }
    }
}

impl<T: Debug> Debug for ThreadBound<T> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // Off-thread the value is inaccessible, so print a placeholder.
        if let Some(value) = self.get() {
            Debug::fmt(value, formatter)
        } else {
            formatter.write_str("unknown")
        }
    }
}

// Copy the bytes of T, even if the currently running thread is the "wrong"
// thread. This is fine as long as the original thread is not simultaneously
// mutating this value via interior mutability, which would be a data race.
//
// Currently `T: Copy` is sufficient to guarantee that T contains no interior
// mutability, because _all_ interior mutability in Rust is built on
// std::cell::UnsafeCell, which has no Copy impl. This impl needs to be
// revisited if that restriction is relaxed in the future.
impl<T: Copy> Copy for ThreadBound<T> {}

impl<T: Copy> Clone for ThreadBound<T> {
    fn clone(&self) -> Self {
        *self
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,109 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use proc_macro2::{Delimiter, TokenStream, TokenTree};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
// Wrapper providing span-insensitive, structural equality and hashing for a
// single `TokenTree`: only delimiter kind, punct char/spacing, literal text,
// and idents participate; spans are never consulted.
pub(crate) struct TokenTreeHelper<'a>(pub &'a TokenTree);

impl<'a> PartialEq for TokenTreeHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (self.0, other.0) {
            (TokenTree::Group(g1), TokenTree::Group(g2)) => {
                // Delimiters must match exactly.
                match (g1.delimiter(), g2.delimiter()) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => {}
                    _ => return false,
                }

                let s1 = g1.stream().into_iter();
                let mut s2 = g2.stream().into_iter();

                // Compare the contained streams element-wise, recursively.
                for item1 in s1 {
                    let item2 = match s2.next() {
                        Some(item) => item,
                        None => return false, // s2 ran out first
                    };
                    if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
                        return false;
                    }
                }
                // Equal only if s2 is not longer than s1.
                s2.next().is_none()
            }
            (TokenTree::Punct(o1), TokenTree::Punct(o2)) => {
                o1.as_char() == o2.as_char()
                    && match (o1.spacing(), o2.spacing()) {
                        (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                        _ => false,
                    }
            }
            // Literals compare by their textual representation.
            (TokenTree::Literal(l1), TokenTree::Literal(l2)) => l1.to_string() == l2.to_string(),
            (TokenTree::Ident(s1), TokenTree::Ident(s2)) => s1 == s2,
            _ => false,
        }
    }
}

impl<'a> Hash for TokenTreeHelper<'a> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        // Each variant hashes a distinct discriminant byte first so that,
        // e.g., a literal and an ident with the same text do not collide.
        match self.0 {
            TokenTree::Group(g) => {
                0u8.hash(h);
                match g.delimiter() {
                    Delimiter::Parenthesis => 0u8.hash(h),
                    Delimiter::Brace => 1u8.hash(h),
                    Delimiter::Bracket => 2u8.hash(h),
                    Delimiter::None => 3u8.hash(h),
                }

                for item in g.stream() {
                    TokenTreeHelper(&item).hash(h);
                }
                0xFFu8.hash(h); // terminator w/ a variant we don't normally hash
            }
            TokenTree::Punct(op) => {
                1u8.hash(h);
                op.as_char().hash(h);
                match op.spacing() {
                    Spacing::Alone => 0u8.hash(h),
                    Spacing::Joint => 1u8.hash(h),
                }
            }
            TokenTree::Literal(lit) => (2u8, lit.to_string()).hash(h),
            TokenTree::Ident(word) => (3u8, word).hash(h),
        }
    }
}
|
||||
|
||||
pub(crate) struct TokenStreamHelper<'a>(pub &'a TokenStream);
|
||||
|
||||
impl<'a> PartialEq for TokenStreamHelper<'a> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
let left = self.0.clone().into_iter().collect::<Vec<_>>();
|
||||
let right = other.0.clone().into_iter().collect::<Vec<_>>();
|
||||
if left.len() != right.len() {
|
||||
return false;
|
||||
}
|
||||
for (a, b) in left.into_iter().zip(right) {
|
||||
if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Hash for TokenStreamHelper<'a> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
let tts = self.0.clone().into_iter().collect::<Vec<_>>();
|
||||
tts.len().hash(state);
|
||||
for tt in tts {
|
||||
TokenTreeHelper(&tt).hash(state);
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,35 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
use crate::parse::ParseStream;
|
||||
use proc_macro2::{Delimiter, TokenStream};
|
||||
use std::cmp::Ordering;
|
||||
use std::iter;
|
||||
|
||||
/// Collect the tokens lying between two positions into a fresh
/// `TokenStream`.
///
/// `begin` and `end` must be cursors into the same token buffer, with `end`
/// at or after `begin`.
pub(crate) fn between<'a>(begin: ParseStream<'a>, end: ParseStream<'a>) -> TokenStream {
    let end = end.cursor();
    let mut cursor = begin.cursor();
    // Both streams must point into the same backing buffer.
    assert!(crate::buffer::same_buffer(end, cursor));

    let mut tokens = TokenStream::new();
    while cursor != end {
        let (tt, next) = cursor.token_tree().unwrap();

        // If stepping over this token tree would overshoot `end`, then `end`
        // lies somewhere inside this group.
        if crate::buffer::cmp_assuming_same_buffer(end, next) == Ordering::Less {
            // A syntax node can cross the boundary of a None-delimited group
            // due to such groups being transparent to the parser in most cases.
            // Any time this occurs the group is known to be semantically
            // irrelevant. https://github.com/dtolnay/syn/issues/1235
            if let Some((inside, _span, after)) = cursor.group(Delimiter::None) {
                assert!(next == after);
                cursor = inside; // descend into the transparent group
                continue;
            } else {
                panic!("verbatim end must not be inside a delimited group");
            }
        }

        tokens.extend(iter::once(tt));
        cursor = next;
    }
    tokens
}
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
// SPDX-License-Identifier: Apache-2.0 OR MIT
|
||||
|
||||
/// Skip over leading whitespace and non-documentation comments, returning
/// the remainder of the string.
///
/// Line comments (`//`, and `////`-or-longer) are skipped up to and
/// including their newline; plain block comments (`/*`, including nested
/// ones, and `/***`-or-longer) are skipped past their matching `*/`. Doc
/// comments (`///`, `//!`, `/**`, `/*!`) are NOT skipped: scanning stops
/// there, as at any other non-whitespace token. An unterminated block
/// comment is returned as-is.
pub(crate) fn skip(mut s: &str) -> &str {
    'outer: while !s.is_empty() {
        let first = s.as_bytes()[0];
        if first == b'/' {
            // A `//` comment that is not a doc comment: `///` is outer doc
            // but `////...` is plain again; `//!` is inner doc.
            let is_line_comment = s.starts_with("//")
                && (!s.starts_with("///") || s.starts_with("////"))
                && !s.starts_with("//!");
            if is_line_comment {
                match s.find('\n') {
                    Some(newline) => {
                        s = &s[newline + 1..];
                        continue;
                    }
                    // Comment runs to end of input.
                    None => return "",
                }
            } else if s.starts_with("/**/") {
                // Empty block comment: too short for the doc-comment rules
                // below, so handled specially.
                s = &s[4..];
                continue;
            } else if s.starts_with("/*")
                && (!s.starts_with("/**") || s.starts_with("/***"))
                && !s.starts_with("/*!")
            {
                // Plain block comment: scan for the matching `*/`,
                // honoring nesting.
                let bytes = s.as_bytes();
                let mut nesting = 0;
                let mut pos = 0;
                while pos + 1 < bytes.len() {
                    if bytes[pos] == b'/' && bytes[pos + 1] == b'*' {
                        nesting += 1;
                        pos += 1; // step over the '*'
                    } else if bytes[pos] == b'*' && bytes[pos + 1] == b'/' {
                        nesting -= 1;
                        if nesting == 0 {
                            s = &s[pos + 2..];
                            continue 'outer;
                        }
                        pos += 1; // step over the '/'
                    }
                    pos += 1;
                }
                // Unterminated block comment.
                return s;
            }
        }
        match first {
            // ASCII whitespace: space, tab, \n, vertical tab, form feed, \r.
            b' ' | 0x09..=0x0D => s = &s[1..],
            // Any other ASCII byte ends the skipping.
            b if b <= 0x7F => return s,
            _ => {
                // Multi-byte UTF-8: skip only if it is whitespace.
                let ch = s.chars().next().unwrap();
                if !is_whitespace(ch) {
                    return s;
                }
                s = &s[ch.len_utf8()..];
            }
        }
    }
    s
}

/// Whitespace as the Rust lexer sees it: `char::is_whitespace` plus the
/// left-to-right and right-to-left marks.
fn is_whitespace(ch: char) -> bool {
    // Rust treats left-to-right mark and right-to-left mark as whitespace
    ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}
|
||||
|
|
@ -15,7 +15,7 @@ def args_crates_cfgs(cfgs):
|
|||
crates_cfgs = {}
|
||||
for cfg in cfgs:
|
||||
crate, vals = cfg.split("=", 1)
|
||||
crates_cfgs[crate] = vals.replace("--cfg", "").split()
|
||||
crates_cfgs[crate] = vals.split()
|
||||
|
||||
return crates_cfgs
|
||||
|
||||
|
|
@ -86,10 +86,31 @@ def generate_crates(srctree, objtree, sysroot_src, external_src, cfgs, core_edit
|
|||
[],
|
||||
)
|
||||
|
||||
append_crate(
|
||||
"proc_macro2",
|
||||
srctree / "rust" / "proc-macro2" / "lib.rs",
|
||||
["core", "alloc", "std", "proc_macro"],
|
||||
cfg=crates_cfgs["proc_macro2"],
|
||||
)
|
||||
|
||||
append_crate(
|
||||
"quote",
|
||||
srctree / "rust" / "quote" / "lib.rs",
|
||||
["alloc", "proc_macro", "proc_macro2"],
|
||||
cfg=crates_cfgs["quote"],
|
||||
)
|
||||
|
||||
append_crate(
|
||||
"syn",
|
||||
srctree / "rust" / "syn" / "lib.rs",
|
||||
["proc_macro", "proc_macro2", "quote"],
|
||||
cfg=crates_cfgs["syn"],
|
||||
)
|
||||
|
||||
append_crate(
|
||||
"macros",
|
||||
srctree / "rust" / "macros" / "lib.rs",
|
||||
["std", "proc_macro"],
|
||||
["std", "proc_macro", "proc_macro2", "quote", "syn"],
|
||||
is_proc_macro=True,
|
||||
)
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue