Auto merge of #47327 - MaloJaffre:beta-backport, r=Mark-Simulacrum
[beta] Backports

Cherry-picked (cleanly) into beta:
- #46916
- #47161
- #47208
- #47269
bors committed Jan 10, 2018
2 parents 2a65c6a + 6ff413e commit a19122c
Showing 40 changed files with 200 additions and 81 deletions.
36 changes: 18 additions & 18 deletions src/Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion src/bootstrap/channel.rs
@@ -29,7 +29,7 @@ pub const CFG_RELEASE_NUM: &str = "1.24.0";
 // An optional number to put after the label, e.g. '.2' -> '-beta.2'
 // Be sure to make this starts with a dot to conform to semver pre-release
 // versions (section 9)
-pub const CFG_PRERELEASE_VERSION: &str = ".1";
+pub const CFG_PRERELEASE_VERSION: &str = ".2";
 
 pub struct GitInfo {
     inner: Option<Info>,
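
The comment in this hunk explains the convention: the pre-release suffix is appended to the `-beta` label, so CFG_RELEASE_NUM "1.24.0" together with the new ".2" names the 1.24.0-beta.2 release. A quick illustration of that composition, using a hypothetical beta_version helper rather than bootstrap's actual code:

// Hypothetical helper showing how the two constants above combine on the
// beta channel ('.2' -> '-beta.2'); the real version string is assembled
// elsewhere in src/bootstrap.
fn beta_version(release_num: &str, prerelease: &str) -> String {
    format!("{}-beta{}", release_num, prerelease)
}

fn main() {
    assert_eq!(beta_version("1.24.0", ".2"), "1.24.0-beta.2");
}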
16 changes: 10 additions & 6 deletions src/libcore/str/pattern.rs
@@ -284,7 +284,7 @@ unsafe impl<'a> Searcher<'a> for CharSearcher<'a> {
     #[inline]
     fn next(&mut self) -> SearchStep {
         let old_finger = self.finger;
-        let slice = unsafe { self.haystack.get_unchecked(old_finger..self.haystack.len()) };
+        let slice = unsafe { self.haystack.get_unchecked(old_finger..self.finger_back) };
         let mut iter = slice.chars();
         let old_len = iter.iter.len();
         if let Some(ch) = iter.next() {
@@ -304,7 +304,8 @@ unsafe impl<'a> Searcher<'a> for CharSearcher<'a> {
     fn next_match(&mut self) -> Option<(usize, usize)> {
         loop {
             // get the haystack after the last character found
-            let bytes = if let Some(slice) = self.haystack.as_bytes().get(self.finger..) {
+            let bytes = if let Some(slice) = self.haystack.as_bytes()
+                .get(self.finger..self.finger_back) {
                 slice
             } else {
                 return None;
@@ -340,7 +341,7 @@ unsafe impl<'a> Searcher<'a> for CharSearcher<'a> {
                 }
             } else {
                 // found nothing, exit
-                self.finger = self.haystack.len();
+                self.finger = self.finger_back;
                 return None;
             }
         }
@@ -353,7 +354,7 @@ unsafe impl<'a> ReverseSearcher<'a> for CharSearcher<'a> {
     #[inline]
     fn next_back(&mut self) -> SearchStep {
         let old_finger = self.finger_back;
-        let slice = unsafe { self.haystack.slice_unchecked(0, old_finger) };
+        let slice = unsafe { self.haystack.slice_unchecked(self.finger, old_finger) };
         let mut iter = slice.chars();
         let old_len = iter.iter.len();
         if let Some(ch) = iter.next_back() {
@@ -374,14 +375,17 @@ unsafe impl<'a> ReverseSearcher<'a> for CharSearcher<'a> {
         let haystack = self.haystack.as_bytes();
         loop {
             // get the haystack up to but not including the last character searched
-            let bytes = if let Some(slice) = haystack.get(..self.finger_back) {
+            let bytes = if let Some(slice) = haystack.get(self.finger..self.finger_back) {
                 slice
             } else {
                 return None;
             };
             // the last byte of the utf8 encoded needle
             let last_byte = unsafe { *self.utf8_encoded.get_unchecked(self.utf8_size - 1) };
             if let Some(index) = memchr::memrchr(last_byte, bytes) {
+                // we searched a slice that was offset by self.finger,
+                // add self.finger to recoup the original index
+                let index = self.finger + index;
                 // memrchr will return the index of the byte we wish to
                 // find. In case of an ASCII character, this is indeed
                 // were we wish our new finger to be ("after" the found
@@ -412,7 +416,7 @@ unsafe impl<'a> ReverseSearcher<'a> for CharSearcher<'a> {
                 // found the last byte when searching in reverse.
                 self.finger_back = index;
             } else {
-                self.finger_back = 0;
+                self.finger_back = self.finger;
                 // found nothing, exit
                 return None;
             }
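
Taken together, these hunks make both the forward and the reverse searcher look only at the not-yet-searched window self.finger..self.finger_back, and collapse that window on failure instead of resetting to the ends of the haystack, so the two directions converge. A toy model of that invariant (hypothetical Window type, not the real CharSearcher):

// Hypothetical model of the window maintained by the two fingers: forward
// progress raises `finger`, reverse progress lowers `finger_back`, and a
// failed search in either direction collapses the window.
struct Window {
    finger: usize,      // start of the unsearched region
    finger_back: usize, // one past the end of the unsearched region
}

impl Window {
    fn record_forward(&mut self, found_end: Option<usize>) {
        match found_end {
            Some(end) => self.finger = end,         // resume after the match
            None => self.finger = self.finger_back, // nothing left: converge
        }
    }

    fn record_backward(&mut self, found_start: Option<usize>) {
        match found_start {
            Some(start) => self.finger_back = start, // resume before the match
            None => self.finger_back = self.finger,  // nothing left: converge
        }
    }
}

fn main() {
    // "abcdeabcdeabcde": 'a' occupies byte ranges (0, 1), (5, 6) and (10, 11).
    let mut w = Window { finger: 0, finger_back: 15 };
    w.record_forward(Some(1));   // found (0, 1)
    w.record_backward(Some(10)); // found (10, 11)
    w.record_forward(Some(6));   // found (5, 6)
    w.record_backward(None);     // no 'a' left in 6..10: window collapses
    assert_eq!(w.finger..w.finger_back, 6..6);
}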
38 changes: 38 additions & 0 deletions src/libcore/tests/pattern.rs
@@ -262,3 +262,41 @@ fn test_reverse_search_shared_bytes() {
         [InRange(37, 40), Rejects(34, 37), InRange(10, 13), Rejects(8, 10), Done]
     );
 }
+
+#[test]
+fn double_ended_regression_test() {
+    // https://github.com/rust-lang/rust/issues/47175
+    // Ensures that double ended searching comes to a convergence
+    search_asserts!("abcdeabcdeabcde", 'a', "alternating double ended search",
+        [next_match, next_match_back, next_match, next_match_back],
+        [InRange(0, 1), InRange(10, 11), InRange(5, 6), Done]
+    );
+    search_asserts!("abcdeabcdeabcde", 'a', "triple double ended search for a",
+        [next_match, next_match_back, next_match_back, next_match_back],
+        [InRange(0, 1), InRange(10, 11), InRange(5, 6), Done]
+    );
+    search_asserts!("abcdeabcdeabcde", 'd', "triple double ended search for d",
+        [next_match, next_match_back, next_match_back, next_match_back],
+        [InRange(3, 4), InRange(13, 14), InRange(8, 9), Done]
+    );
+    search_asserts!(STRESS, 'Á', "Double ended search for two-byte Latin character",
+        [next_match, next_match_back, next_match, next_match_back],
+        [InRange(0, 2), InRange(32, 34), InRange(8, 10), Done]
+    );
+    search_asserts!(STRESS, '각', "Reverse double ended search for three-byte Hangul character",
+        [next_match_back, next_back, next_match, next, next_match_back, next_match],
+        [InRange(34, 37), Rejects(32, 34), InRange(19, 22), Rejects(22, 25), InRange(28, 31), Done]
+    );
+    search_asserts!(STRESS, 'ก', "Double ended search for three-byte Thai character",
+        [next_match, next_back, next, next_match_back, next_match],
+        [InRange(22, 25), Rejects(47, 48), Rejects(25, 28), InRange(40, 43), Done]
+    );
+    search_asserts!(STRESS, '😁', "Double ended search for four-byte emoji",
+        [next_match_back, next, next_match, next_back, next_match],
+        [InRange(43, 47), Rejects(0, 2), InRange(15, 19), Rejects(40, 43), Done]
+    );
+    search_asserts!(STRESS, 'ꁁ', "Double ended search for three-byte Yi character with repeated bytes",
+        [next_match, next, next_match_back, next_back, next_match],
+        [InRange(10, 13), Rejects(13, 14), InRange(37, 40), Rejects(34, 37), Done]
+    );
+}
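
The convergence exercised by double_ended_regression_test above is also observable through the public API: str::match_indices is driven by CharSearcher and is double-ended for char patterns. A minimal sketch mirroring the first test case (alternating forward and reverse steps on "abcdeabcdeabcde"):

fn main() {
    let haystack = "abcdeabcdeabcde";
    let mut matches = haystack.match_indices('a');

    // Alternating directions must meet in the middle without repeating or
    // skipping a match (rust-lang/rust#47175).
    assert_eq!(matches.next(), Some((0, "a")));
    assert_eq!(matches.next_back(), Some((10, "a")));
    assert_eq!(matches.next(), Some((5, "a")));
    // Both ends have converged; nothing is left in either direction.
    assert_eq!(matches.next_back(), None);
    assert_eq!(matches.next(), None);
}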
2 changes: 1 addition & 1 deletion src/librustc/Cargo.toml
@@ -14,7 +14,7 @@ bitflags = "1.0"
 fmt_macros = { path = "../libfmt_macros" }
 graphviz = { path = "../libgraphviz" }
 jobserver = "0.1"
-log = "0.3"
+log = "0.4"
 rustc_apfloat = { path = "../librustc_apfloat" }
 rustc_back = { path = "../librustc_back" }
 rustc_const_math = { path = "../librustc_const_math" }
2 changes: 1 addition & 1 deletion src/librustc/hir/map/mod.rs
@@ -1068,7 +1068,7 @@ pub fn map_crate<'hir>(sess: &::session::Session,
                                                   cmdline_args)
     };
 
-    if log_enabled!(::log::LogLevel::Debug) {
+    if log_enabled!(::log::Level::Debug) {
         // This only makes sense for ordered stores; note the
         // enumerate to count the number of entries.
         let (entries_less_1, _) = map.iter().filter(|&x| {
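
The log = "0.4" bumps in the Cargo.toml files in this commit go hand in hand with this rename: log 0.4 renamed log::LogLevel to log::Level (and LogLevelFilter to LevelFilter). A minimal sketch of the new spelling in an ordinary crate that depends on log = "0.4" (not rustc itself):

#[macro_use]
extern crate log;

fn main() {
    // `log_enabled!` now takes `log::Level`, not `log::LogLevel`.
    if log_enabled!(log::Level::Debug) {
        // Only pay for potentially expensive formatting when a logger has
        // debug output enabled.
        debug!("debug logging is enabled");
    }
}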
12 changes: 12 additions & 0 deletions src/librustc/mir/mono.rs
@@ -12,9 +12,11 @@ use syntax::ast::NodeId;
 use syntax::symbol::InternedString;
 use ty::Instance;
 use util::nodemap::FxHashMap;
+use rustc_data_structures::base_n;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult,
                                            StableHasher};
 use ich::{Fingerprint, StableHashingContext, NodeIdHashingMode};
+use std::hash::Hash;
 
 #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
 pub enum MonoItem<'tcx> {
@@ -119,6 +121,16 @@ impl<'tcx> CodegenUnit<'tcx> {
     {
         &mut self.items
     }
+
+    pub fn mangle_name(human_readable_name: &str) -> String {
+        // We generate a 80 bit hash from the name. This should be enough to
+        // avoid collisions and is still reasonably short for filenames.
+        let mut hasher = StableHasher::new();
+        human_readable_name.hash(&mut hasher);
+        let hash: u128 = hasher.finish();
+        let hash = hash & ((1u128 << 80) - 1);
+        base_n::encode(hash, base_n::CASE_INSENSITIVE)
+    }
 }
 
 impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> {
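
mangle_name hashes a human-readable codegen-unit name down to 80 bits and renders the result with the compiler's base_n encoder, giving short, filename-safe, deterministic names. A standalone sketch of the same scheme, substituting a hypothetical 128-bit FNV-1a hash for StableHasher and a hand-rolled base-36 encoder for rustc_data_structures::base_n:

// Sketch only: rustc uses StableHasher and base_n::CASE_INSENSITIVE; here a
// 128-bit FNV-1a hash and a base-36 alphabet stand in for them.
fn fnv1a_128(input: &str) -> u128 {
    const OFFSET_BASIS: u128 = 0x6c62272e07bb014262b821756295c58d;
    const PRIME: u128 = 0x0000000001000000000000000000013b;
    let mut hash = OFFSET_BASIS;
    for byte in input.bytes() {
        hash ^= byte as u128;
        hash = hash.wrapping_mul(PRIME);
    }
    hash
}

fn encode_base36(mut n: u128) -> String {
    const ALPHABET: &[u8] = b"0123456789abcdefghijklmnopqrstuvwxyz";
    let mut digits = Vec::new();
    loop {
        digits.push(ALPHABET[(n % 36) as usize]);
        n /= 36;
        if n == 0 {
            break;
        }
    }
    digits.reverse();
    String::from_utf8(digits).unwrap()
}

fn mangle_name_sketch(human_readable_name: &str) -> String {
    // Keep only the low 80 bits: short enough for filenames, still plenty of
    // collision resistance for codegen-unit names.
    let hash = fnv1a_128(human_readable_name) & ((1u128 << 80) - 1);
    encode_base36(hash)
}

fn main() {
    println!("{}", mangle_name_sketch("my_crate.cgu-0"));
}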
2 changes: 2 additions & 0 deletions src/librustc/session/config.rs
@@ -1234,6 +1234,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
         "rewrite operators on i128 and u128 into lang item calls (typically provided \
          by compiler-builtins) so translation doesn't need to support them,
          overriding the default for the current target"),
+    human_readable_cgu_names: bool = (false, parse_bool, [TRACKED],
+        "generate human-readable, predictable names for codegen units"),
 }
 
 pub fn default_lib_output() -> CrateType {
2 changes: 1 addition & 1 deletion src/librustc_back/Cargo.toml
@@ -11,7 +11,7 @@ crate-type = ["dylib"]
 [dependencies]
 syntax = { path = "../libsyntax" }
 serialize = { path = "../libserialize" }
-log = "0.3"
+log = "0.4"
 rand = "0.3"
 
 [features]
2 changes: 1 addition & 1 deletion src/librustc_borrowck/Cargo.toml
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 test = false
 
 [dependencies]
-log = "0.3"
+log = "0.4"
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
 graphviz = { path = "../libgraphviz" }
2 changes: 1 addition & 1 deletion src/librustc_const_eval/Cargo.toml
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 
 [dependencies]
 arena = { path = "../libarena" }
-log = "0.3"
+log = "0.4"
 rustc = { path = "../librustc" }
 rustc_const_math = { path = "../librustc_const_math" }
 rustc_data_structures = { path = "../librustc_data_structures" }
4 changes: 2 additions & 2 deletions src/librustc_data_structures/Cargo.toml
@@ -9,12 +9,12 @@ path = "lib.rs"
 crate-type = ["dylib"]
 
 [dependencies]
-log = "0.3"
+log = "0.4"
 serialize = { path = "../libserialize" }
 cfg-if = "0.1.2"
 stable_deref_trait = "1.0.0"
 parking_lot_core = "0.2.8"
 
 [dependencies.parking_lot]
 version = "0.5"
-features = ["nightly"]
\ No newline at end of file
+features = ["nightly"]
(The remaining changed files in this commit are not shown here.)
