Rollup of 11 pull requests #98335


Merged
26 commits merged on Jun 21, 2022
26 commits
4505579
adjust transmute const stabilization version
RalfJung May 22, 2022
976336d
Add proper tracing spans to rustc_trait_selection::traits::error_repo…
coolreader18 Jun 6, 2022
ccf6124
Fix erroneous span for borrowck error
compiler-errors Jun 12, 2022
ccd956a
Remove `Cursor::append`.
nnethercote Jun 19, 2022
178b746
Remove `Cursor::index`.
nnethercote Jun 19, 2022
f6b5788
Remove `TokenStream::from_streams`.
nnethercote Jun 19, 2022
2a5487a
Merge `TokenStreamBuilder::push` into `TokenStreamBuilder::build`.
nnethercote Jun 20, 2022
69f45b7
Add blank lines between methods in `proc_macro_server.rs`.
nnethercote Jun 20, 2022
bd60475
`try_fold_unevaluated` for infallible folders
eggyal Jun 20, 2022
27dcebe
Improve loading of crates.js and sidebar-items.js
jsha Jun 15, 2022
7098a71
Improve docs for `is_running` to explain use case
joshtriplett Feb 15, 2022
a171a6b
Remove lies in comments.
m-ou-se Jun 20, 2022
db542e4
:arrow_up: rust-analyzer
lnicola Jun 21, 2022
d2ea7e2
Avoid an ICE and instead let the compiler report a useful error
oli-obk Jun 21, 2022
44dbd98
update ioslice docs to use shared slices
conradludgate Jun 21, 2022
84c17c2
Rollup merge of #94033 - joshtriplett:documentation-is-running-better…
JohnTitor Jun 21, 2022
b20aff2
Rollup merge of #97269 - RalfJung:transmute, r=m-ou-se
JohnTitor Jun 21, 2022
9c800ec
Rollup merge of #97805 - coolreader18:trace-suggestions, r=oli-obk
JohnTitor Jun 21, 2022
a7f789b
Rollup merge of #98022 - compiler-errors:erroneous-borrowck-span, r=o…
JohnTitor Jun 21, 2022
75f17ed
Rollup merge of #98124 - jsha:defer-crates, r=GuillaumeGomez
JohnTitor Jun 21, 2022
b1d2e5c
Rollup merge of #98278 - nnethercote:some-token-stream-cleanups, r=pe…
JohnTitor Jun 21, 2022
51a6091
Rollup merge of #98306 - eggyal:add-unevaluated-to-blanket-falliblety…
JohnTitor Jun 21, 2022
18b01d5
Rollup merge of #98313 - m-ou-se:fix-comments, r=joshtriplett
JohnTitor Jun 21, 2022
21a20b4
Rollup merge of #98323 - lnicola:rust-analyzer-2022-06-21, r=lnicola
JohnTitor Jun 21, 2022
1b48f09
Rollup merge of #98329 - oli-obk:fast_path_ice, r=cjgillot
JohnTitor Jun 21, 2022
e509242
Rollup merge of #98330 - conradludgate:io-slice-mut-docs, r=Dylan-DPC
JohnTitor Jun 21, 2022
152 changes: 57 additions & 95 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -25,7 +25,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_span::{Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};

use std::{fmt, iter, mem};
use std::{fmt, iter};

/// When the main Rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token tree. This is a very
@@ -399,45 +399,6 @@ impl TokenStream {
self.0.len()
}

pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
match streams.len() {
0 => TokenStream::default(),
1 => streams.pop().unwrap(),
_ => {
// We are going to extend the first stream in `streams` with
// the elements from the subsequent streams. This requires
// using `make_mut()` on the first stream, and in practice this
// doesn't cause cloning 99.9% of the time.
//
// One very common use case is when `streams` has two elements,
// where the first stream has any number of elements within
// (often 1, but sometimes many more) and the second stream has
// a single element within.

// Determine how much the first stream will be extended.
// Needed to avoid quadratic blow up from on-the-fly
// reallocations (#57735).
let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();

// Get the first stream. If it's `None`, create an empty
// stream.
let mut iter = streams.drain(..);
let mut first_stream_lrc = iter.next().unwrap().0;

// Append the elements to the first stream, after reserving
// space for them.
let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
first_vec_mut.reserve(num_appends);
for stream in iter {
first_vec_mut.extend(stream.0.iter().cloned());
}

// Create the final `TokenStream`.
TokenStream(first_stream_lrc)
}
}
}

pub fn trees(&self) -> CursorRef<'_> {
CursorRef::new(self)
}
@@ -562,50 +523,65 @@ impl TokenStreamBuilder {
}

pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
let mut stream = stream.into();

// If `self` is not empty and the last tree within the last stream is a
// token tree marked with `Joint`...
if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut()
&& let Some((TokenTree::Token(last_token), Spacing::Joint)) = last_stream_lrc.last()
// ...and `stream` is not empty and the first tree within it is
// a token tree...
&& let TokenStream(ref mut stream_lrc) = stream
&& let Some((TokenTree::Token(token), spacing)) = stream_lrc.first()
// ...and the two tokens can be glued together...
&& let Some(glued_tok) = last_token.glue(&token)
{
// ...then do so, by overwriting the last token
// tree in `self` and removing the first token tree
// from `stream`. This requires using `make_mut()`
// on the last stream in `self` and on `stream`,
// and in practice this doesn't cause cloning 99.9%
// of the time.

// Overwrite the last token tree with the merged
// token.
let last_vec_mut = Lrc::make_mut(last_stream_lrc);
*last_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);

// Remove the first token tree from `stream`. (This
// is almost always the only tree in `stream`.)
let stream_vec_mut = Lrc::make_mut(stream_lrc);
stream_vec_mut.remove(0);

// Don't push `stream` if it's empty -- that could
// block subsequent token gluing, by getting
// between two token trees that should be glued
// together.
if !stream.is_empty() {
self.0.push(stream);
}
return;
}
self.0.push(stream);
self.0.push(stream.into());
}

pub fn build(self) -> TokenStream {
TokenStream::from_streams(self.0)
let mut streams = self.0;
match streams.len() {
0 => TokenStream::default(),
1 => streams.pop().unwrap(),
_ => {
// We will extend the first stream in `streams` with the
// elements from the subsequent streams. This requires using
// `make_mut()` on the first stream, and in practice this
// doesn't cause cloning 99.9% of the time.
//
// One very common use case is when `streams` has two elements,
// where the first stream has any number of elements within
// (often 1, but sometimes many more) and the second stream has
// a single element within.

// Determine how much the first stream will be extended.
// Needed to avoid quadratic blow up from on-the-fly
// reallocations (#57735).
let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();

// Get the first stream, which will become the result stream.
// If it's `None`, create an empty stream.
let mut iter = streams.drain(..);
let mut res_stream_lrc = iter.next().unwrap().0;

// Append the subsequent elements to the result stream, after
// reserving space for them.
let res_vec_mut = Lrc::make_mut(&mut res_stream_lrc);
res_vec_mut.reserve(num_appends);
for stream in iter {
let stream_iter = stream.0.iter().cloned();

// If (a) `res_vec_mut` is not empty and the last tree
// within it is a token tree marked with `Joint`, and (b)
// `stream` is not empty and the first tree within it is a
// token tree, and (c) the two tokens can be glued
// together...
if let Some((TokenTree::Token(last_tok), Spacing::Joint)) = res_vec_mut.last()
&& let Some((TokenTree::Token(tok), spacing)) = stream.0.first()
&& let Some(glued_tok) = last_tok.glue(&tok)
{
// ...then overwrite the last token tree in
// `res_vec_mut` with the glued token, and skip the
// first token tree from `stream`.
*res_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
res_vec_mut.extend(stream_iter.skip(1));
} else {
// Append all of `stream`.
res_vec_mut.extend(stream_iter);
}
}

TokenStream(res_stream_lrc)
}
}
}
}
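
The comments in the new `build` above call out two ideas: reserving all the extra capacity up front to avoid the quadratic reallocation behavior reported in #57735, and relying on `Lrc::make_mut` so the common uniquely-owned case mutates in place without cloning. The following is a minimal, stand-alone sketch of that pattern using plain standard-library types (`Arc<Vec<u32>>` standing in for `Lrc<Vec<(TokenTree, Spacing)>>`); it illustrates the shape of the code, not the compiler's actual implementation.

use std::sync::Arc;

// Concatenate several shared vectors into one, mirroring the shape of
// `TokenStreamBuilder::build`: compute the total extra length, reserve once,
// then extend, so repeated appends never reallocate quadratically.
fn concat(mut streams: Vec<Arc<Vec<u32>>>) -> Arc<Vec<u32>> {
    match streams.len() {
        0 => Arc::new(Vec::new()),
        1 => streams.pop().unwrap(),
        _ => {
            let extra: usize = streams.iter().skip(1).map(|s| s.len()).sum();
            let mut iter = streams.drain(..);
            let mut first = iter.next().unwrap();
            // `make_mut` clones only when the Arc is shared; a uniquely
            // owned stream is mutated in place.
            let first_mut = Arc::make_mut(&mut first);
            first_mut.reserve(extra);
            for s in iter {
                first_mut.extend(s.iter().copied());
            }
            first
        }
    }
}

fn main() {
    let streams = vec![Arc::new(vec![1, 2]), Arc::new(vec![3]), Arc::new(vec![4, 5])];
    assert_eq!(*concat(streams), vec![1, 2, 3, 4, 5]);
}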

@@ -679,20 +655,6 @@ impl Cursor {
})
}

pub fn index(&self) -> usize {
self.index
}

pub fn append(&mut self, new_stream: TokenStream) {
if new_stream.is_empty() {
return;
}
let index = self.index;
let stream = mem::take(&mut self.stream);
*self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
self.index = index;
}

pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
}
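
The other detail worth noting from `build` above is token "gluing": when the accumulated stream ends in a token marked `Spacing::Joint` and the next stream starts with a token that can merge with it (for example `<` followed by `=`), the two are combined before the rest is appended. Below is a toy illustration with a hand-rolled `Tok` enum and `glue` method; the real `Token::glue` and `TokenTree` types in rustc are considerably richer.

#[derive(Clone, Copy, PartialEq, Debug)]
enum Spacing { Alone, Joint }

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { Lt, Eq, Le }

impl Tok {
    // Returns the merged token if `self` immediately followed by `other`
    // forms a single token (here only `<` + `=` -> `<=`).
    fn glue(self, other: Tok) -> Option<Tok> {
        match (self, other) {
            (Tok::Lt, Tok::Eq) => Some(Tok::Le),
            _ => None,
        }
    }
}

fn append_glued(res: &mut Vec<(Tok, Spacing)>, stream: &[(Tok, Spacing)]) {
    // Check whether the trailing token of `res` is `Joint` and can be glued
    // with the leading token of `stream`.
    let glued = match (res.last(), stream.first()) {
        (Some(&(last, Spacing::Joint)), Some(&(first, spacing))) => {
            last.glue(first).map(|g| (g, spacing))
        }
        _ => None,
    };
    if let Some(merged) = glued {
        // Overwrite the trailing `Joint` token with the merged token and
        // append the rest of `stream`, skipping its first token.
        *res.last_mut().unwrap() = merged;
        res.extend_from_slice(&stream[1..]);
    } else {
        res.extend_from_slice(stream);
    }
}

fn main() {
    let mut res = vec![(Tok::Lt, Spacing::Joint)];
    append_glued(&mut res, &[(Tok::Eq, Spacing::Alone)]);
    assert_eq!(res, vec![(Tok::Le, Spacing::Alone)]);
}
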
24 changes: 14 additions & 10 deletions compiler/rustc_borrowck/src/type_check/mod.rs
@@ -357,12 +357,20 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
.add_element(live_region_vid, location);
});

// HACK(compiler-errors): Constants that are gathered into Body.required_consts
// have their locations erased...
let locations = if location != Location::START {
location.to_locations()
} else {
Locations::All(constant.span)
};

if let Some(annotation_index) = constant.user_ty {
if let Err(terr) = self.cx.relate_type_and_user_type(
constant.literal.ty(),
ty::Variance::Invariant,
&UserTypeProjection { base: annotation_index, projs: vec![] },
location.to_locations(),
locations,
ConstraintCategory::Boring,
) {
let annotation = &self.cx.user_type_annotations[annotation_index];
@@ -390,12 +398,9 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
promoted: &Body<'tcx>,
ty,
san_ty| {
if let Err(terr) = verifier.cx.eq_types(
ty,
san_ty,
location.to_locations(),
ConstraintCategory::Boring,
) {
if let Err(terr) =
verifier.cx.eq_types(ty, san_ty, locations, ConstraintCategory::Boring)
{
span_mirbug!(
verifier,
promoted,
@@ -416,7 +421,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
}
} else {
if let Err(terr) = self.cx.fully_perform_op(
location.to_locations(),
locations,
ConstraintCategory::Boring,
self.cx.param_env.and(type_op::ascribe_user_type::AscribeUserType::new(
constant.literal.ty(),
@@ -435,7 +440,6 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
}
} else if let Some(static_def_id) = constant.check_static_ptr(tcx) {
let unnormalized_ty = tcx.type_of(static_def_id);
let locations = location.to_locations();
let normalized_ty = self.cx.normalize(unnormalized_ty, locations);
let literal_ty = constant.literal.ty().builtin_deref(true).unwrap().ty;

@@ -454,7 +458,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
self.cx.normalize_and_prove_instantiated_predicates(
def_id,
instantiated_predicates,
location.to_locations(),
locations,
);
}
}
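
The hunk above works around constants gathered into `Body.required_consts` having their locations erased to `Location::START`: in that case the borrow checker falls back to `Locations::All(constant.span)` so any error is still attributed to the constant's own span. The sketch below reproduces only that selection logic with simplified stand-in types; `Location`, `Locations`, and the `(u32, u32)` span here are hypothetical, not rustc's definitions.

// Simplified stand-ins for rustc's location types; only the fallback
// selection from the hunk above is modeled.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Location { block: usize, statement: usize }

impl Location {
    const START: Location = Location { block: 0, statement: 0 };
}

type Span = (u32, u32); // hypothetical stand-in for a source span

#[derive(PartialEq, Debug)]
enum Locations {
    Single(Location),
    All(Span),
}

// Constants pulled into `required_consts` have their location erased to
// `Location::START`; when that happens, blame the constant's span instead
// of a single (meaningless) program point.
fn constant_locations(location: Location, constant_span: Span) -> Locations {
    if location != Location::START {
        Locations::Single(location)
    } else {
        Locations::All(constant_span)
    }
}

fn main() {
    let erased = Location::START;
    let real = Location { block: 3, statement: 1 };
    assert_eq!(constant_locations(erased, (10, 20)), Locations::All((10, 20)));
    assert_eq!(constant_locations(real, (10, 20)), Locations::Single(real));
}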