Rollup merge of #58367 - nnethercote:rm-two-dead-funcs, r=alexcrichton
Remove two dead functions.
diff --git a/src/libarena/Cargo.toml b/src/libarena/Cargo.toml
index e2af67d..82fc64b 100644
--- a/src/libarena/Cargo.toml
+++ b/src/libarena/Cargo.toml
@@ -2,6 +2,7 @@
authors = ["The Rust Project Developers"]
name = "arena"
version = "0.0.0"
+edition = "2018"
[lib]
name = "arena"
@@ -9,4 +10,4 @@
crate-type = ["dylib"]
[dependencies]
-rustc_data_structures = { path = "../librustc_data_structures" }
\ No newline at end of file
+rustc_data_structures = { path = "../librustc_data_structures" }
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index aa522d8..8ae046c 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -11,17 +11,17 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/",
test(no_crate_inject, attr(deny(warnings))))]
+#![deny(rust_2018_idioms)]
+
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
-#![feature(nll)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]
extern crate alloc;
-extern crate rustc_data_structures;
use rustc_data_structures::sync::MTLock;
@@ -476,7 +476,7 @@
#[cfg(test)]
mod tests {
extern crate test;
- use self::test::Bencher;
+ use test::Bencher;
use super::TypedArena;
use std::cell::Cell;
@@ -511,15 +511,15 @@
impl<'a> Wrap<'a> {
fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
- let r: &EI = self.0.alloc(EI::I(f()));
+ let r: &EI<'_> = self.0.alloc(EI::I(f()));
if let &EI::I(ref i) = r {
i
} else {
panic!("mismatch");
}
}
- fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
- let r: &EI = self.0.alloc(EI::O(f()));
+ fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer<'_> {
+ let r: &EI<'_> = self.0.alloc(EI::O(f()));
if let &EI::O(ref o) = r {
o
} else {
@@ -609,7 +609,7 @@
count: &'a Cell<u32>,
}
- impl<'a> Drop for DropCounter<'a> {
+ impl Drop for DropCounter<'_> {
fn drop(&mut self) {
self.count.set(self.count.get() + 1);
}
@@ -619,7 +619,7 @@
fn test_typed_arena_drop_count() {
let counter = Cell::new(0);
{
- let arena: TypedArena<DropCounter> = TypedArena::default();
+ let arena: TypedArena<DropCounter<'_>> = TypedArena::default();
for _ in 0..100 {
// Allocate something with drop glue to make sure it doesn't leak.
arena.alloc(DropCounter { count: &counter });
@@ -631,7 +631,7 @@
#[test]
fn test_typed_arena_drop_on_clear() {
let counter = Cell::new(0);
- let mut arena: TypedArena<DropCounter> = TypedArena::default();
+ let mut arena: TypedArena<DropCounter<'_>> = TypedArena::default();
for i in 0..10 {
for _ in 0..100 {
// Allocate something with drop glue to make sure it doesn't leak.
diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs
index ea67c01..aacd6ce 100644
--- a/src/libfmt_macros/lib.rs
+++ b/src/libfmt_macros/lib.rs
@@ -10,6 +10,7 @@
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(rustc_private)]
pub use Piece::*;
diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs
index 8ce0f75..fadcfae 100644
--- a/src/libgraphviz/lib.rs
+++ b/src/libgraphviz/lib.rs
@@ -276,6 +276,7 @@
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(str_escape)]
use LabelText::*;
diff --git a/src/libpanic_abort/lib.rs b/src/libpanic_abort/lib.rs
index 7c6f36e..edc97cd 100644
--- a/src/libpanic_abort/lib.rs
+++ b/src/libpanic_abort/lib.rs
@@ -14,6 +14,7 @@
#![feature(core_intrinsics)]
#![feature(libc)]
+#![feature(nll)]
#![feature(panic_runtime)]
#![feature(staged_api)]
#![feature(rustc_attrs)]
diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs
index 2cdc5a4..09a4a96 100644
--- a/src/libproc_macro/lib.rs
+++ b/src/libproc_macro/lib.rs
@@ -17,6 +17,7 @@
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(staged_api)]
#![feature(const_fn)]
#![feature(extern_types)]
diff --git a/src/libprofiler_builtins/lib.rs b/src/libprofiler_builtins/lib.rs
index 9c8d3a1..2ce1a11 100644
--- a/src/libprofiler_builtins/lib.rs
+++ b/src/libprofiler_builtins/lib.rs
@@ -5,5 +5,6 @@
reason = "internal implementation detail of rustc right now",
issue = "0")]
#![allow(unused_features)]
+#![feature(nll)]
#![feature(staged_api)]
#![deny(rust_2018_idioms)]
diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs
index 955f834..2bf8218 100644
--- a/src/librustc/hir/map/mod.rs
+++ b/src/librustc/hir/map/mod.rs
@@ -934,7 +934,9 @@
}
}
- pub fn expect_variant_data(&self, id: NodeId) -> &'hir VariantData {
+ pub fn expect_variant_data(&self, id: HirId) -> &'hir VariantData {
+ let id = self.hir_to_node_id(id); // FIXME(@ljedrz): remove when possible
+
match self.find(id) {
Some(Node::Item(i)) => {
match i.node {
@@ -949,7 +951,9 @@
}
}
- pub fn expect_variant(&self, id: NodeId) -> &'hir Variant {
+ pub fn expect_variant(&self, id: HirId) -> &'hir Variant {
+ let id = self.hir_to_node_id(id); // FIXME(@ljedrz): remove when possible
+
match self.find(id) {
Some(Node::Variant(variant)) => variant,
_ => bug!("expected variant, found {}", self.node_to_string(id)),
diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs
index 3ff76e9..cb31441 100644
--- a/src/librustc/lint/builtin.rs
+++ b/src/librustc/lint/builtin.rs
@@ -473,6 +473,7 @@
MacroExpandedMacroExportsAccessedByAbsolutePaths(Span),
ElidedLifetimesInPaths(usize, Span, bool, Span, String),
UnknownCrateTypes(Span, String, String),
+ UnusedImports(String, Vec<(Span, String)>),
}
impl BuiltinLintDiagnostics {
@@ -554,6 +555,15 @@
BuiltinLintDiagnostics::UnknownCrateTypes(span, note, sugg) => {
db.span_suggestion(span, &note, sugg, Applicability::MaybeIncorrect);
}
+ BuiltinLintDiagnostics::UnusedImports(message, replaces) => {
+ if !replaces.is_empty() {
+ db.multipart_suggestion(
+ &message,
+ replaces,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
}
}
}
diff --git a/src/librustc/macros.rs b/src/librustc/macros.rs
index 2978b35..ccae9d3 100644
--- a/src/librustc/macros.rs
+++ b/src/librustc/macros.rs
@@ -62,38 +62,36 @@
#[macro_export]
macro_rules! impl_stable_hash_for {
// Enums
- // FIXME(mark-i-m): Some of these should be `?` rather than `*`. See the git blame and change
- // them back when `?` is supported again.
(enum $enum_name:path {
$( $variant:ident
// this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`,
// when it should be only one or the other
- $( ( $($field:ident $(-> $delegate:tt)*),* ) )*
- $( { $($named_field:ident $(-> $named_delegate:tt)*),* } )*
- ),* $(,)*
+ $( ( $($field:ident $(-> $delegate:tt)?),* ) )?
+ $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
+ ),* $(,)?
}) => {
impl_stable_hash_for!(
impl<> for enum $enum_name [ $enum_name ] { $( $variant
- $( ( $($field $(-> $delegate)*),* ) )*
- $( { $($named_field $(-> $named_delegate)*),* } )*
+ $( ( $($field $(-> $delegate)?),* ) )?
+ $( { $($named_field $(-> $named_delegate)?),* } )?
),* }
);
};
// We want to use the enum name both in the `impl ... for $enum_name` as well as for
// importing all the variants. Unfortunately it seems we have to take the name
// twice for this purpose
- (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*>
+ (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?>
for enum $enum_name:path
[ $enum_path:path ]
{
$( $variant:ident
// this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`,
// when it should be only one or the other
- $( ( $($field:ident $(-> $delegate:tt)*),* ) )*
- $( { $($named_field:ident $(-> $named_delegate:tt)*),* } )*
- ),* $(,)*
+ $( ( $($field:ident $(-> $delegate:tt)?),* ) )?
+ $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
+ ),* $(,)?
}) => {
- impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+ impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>
for $enum_name
where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
@@ -107,9 +105,9 @@
match *self {
$(
- $variant $( ( $(ref $field),* ) )* $( { $(ref $named_field),* } )* => {
- $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*)*
- $($( __impl_stable_hash_field!($named_field, __ctx, __hasher $(, $named_delegate)*) );*)*
+ $variant $( ( $(ref $field),* ) )? $( { $(ref $named_field),* } )? => {
+ $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*)?
+ $($( __impl_stable_hash_field!($named_field, __ctx, __hasher $(, $named_delegate)?) );*)?
}
)*
}
@@ -117,16 +115,15 @@
}
};
// Structs
- // FIXME(mark-i-m): same here.
- (struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),* $(,)* }) => {
+ (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
impl_stable_hash_for!(
- impl<'tcx> for struct $struct_name { $($field $(-> $delegate)*),* }
+ impl<'tcx> for struct $struct_name { $($field $(-> $delegate)?),* }
);
};
- (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*> for struct $struct_name:path {
- $($field:ident $(-> $delegate:tt)*),* $(,)*
+ (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?> for struct $struct_name:path {
+ $($field:ident $(-> $delegate:tt)?),* $(,)?
}) => {
- impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+ impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
{
@@ -138,21 +135,20 @@
$(ref $field),*
} = *self;
- $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*
+ $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
}
}
};
// Tuple structs
- // We cannot use normale parentheses here, the parser won't allow it
- // FIXME(mark-i-m): same here.
- (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),* $(,)* }) => {
+ // We cannot use normal parentheses here, the parser won't allow it
+ (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
impl_stable_hash_for!(
- impl<'tcx> for tuple_struct $struct_name { $($field $(-> $delegate)*),* }
+ impl<'tcx> for tuple_struct $struct_name { $($field $(-> $delegate)?),* }
);
};
- (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*>
- for tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),* $(,)* }) => {
- impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+ (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?>
+ for tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
+ impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
{
@@ -164,7 +160,7 @@
$(ref $field),*
) = *self;
- $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*
+ $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
}
}
};
diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs
index 0180256..e582803 100644
--- a/src/librustc/mir/visit.rs
+++ b/src/librustc/mir/visit.rs
@@ -38,10 +38,10 @@
// ```rust
// fn super_basic_block_data(&mut self,
// block: BasicBlock,
-// data: & $($mutability)* BasicBlockData<'tcx>) {
+// data: & $($mutability)? BasicBlockData<'tcx>) {
// let BasicBlockData {
-// ref $($mutability)* statements,
-// ref $($mutability)* terminator,
+// statements,
+// terminator,
// is_cleanup: _
// } = *data;
//
@@ -67,111 +67,111 @@
// `is_cleanup` above.
macro_rules! make_mir_visitor {
- ($visitor_trait_name:ident, $($mutability:ident)*) => {
+ ($visitor_trait_name:ident, $($mutability:ident)?) => {
pub trait $visitor_trait_name<'tcx> {
// Override these, and call `self.super_xxx` to revert back to the
// default behavior.
- fn visit_mir(&mut self, mir: & $($mutability)* Mir<'tcx>) {
+ fn visit_mir(&mut self, mir: & $($mutability)? Mir<'tcx>) {
self.super_mir(mir);
}
fn visit_basic_block_data(&mut self,
block: BasicBlock,
- data: & $($mutability)* BasicBlockData<'tcx>) {
+ data: & $($mutability)? BasicBlockData<'tcx>) {
self.super_basic_block_data(block, data);
}
fn visit_source_scope_data(&mut self,
- scope_data: & $($mutability)* SourceScopeData) {
+ scope_data: & $($mutability)? SourceScopeData) {
self.super_source_scope_data(scope_data);
}
fn visit_statement(&mut self,
block: BasicBlock,
- statement: & $($mutability)* Statement<'tcx>,
+ statement: & $($mutability)? Statement<'tcx>,
location: Location) {
self.super_statement(block, statement, location);
}
fn visit_assign(&mut self,
block: BasicBlock,
- place: & $($mutability)* Place<'tcx>,
- rvalue: & $($mutability)* Rvalue<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
+ rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
self.super_assign(block, place, rvalue, location);
}
fn visit_terminator(&mut self,
block: BasicBlock,
- terminator: & $($mutability)* Terminator<'tcx>,
+ terminator: & $($mutability)? Terminator<'tcx>,
location: Location) {
self.super_terminator(block, terminator, location);
}
fn visit_terminator_kind(&mut self,
block: BasicBlock,
- kind: & $($mutability)* TerminatorKind<'tcx>,
+ kind: & $($mutability)? TerminatorKind<'tcx>,
location: Location) {
self.super_terminator_kind(block, kind, location);
}
fn visit_assert_message(&mut self,
- msg: & $($mutability)* AssertMessage<'tcx>,
+ msg: & $($mutability)? AssertMessage<'tcx>,
location: Location) {
self.super_assert_message(msg, location);
}
fn visit_rvalue(&mut self,
- rvalue: & $($mutability)* Rvalue<'tcx>,
+ rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
self.super_rvalue(rvalue, location);
}
fn visit_operand(&mut self,
- operand: & $($mutability)* Operand<'tcx>,
+ operand: & $($mutability)? Operand<'tcx>,
location: Location) {
self.super_operand(operand, location);
}
fn visit_ascribe_user_ty(&mut self,
- place: & $($mutability)* Place<'tcx>,
- variance: & $($mutability)* ty::Variance,
- user_ty: & $($mutability)* UserTypeProjection<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
+ variance: & $($mutability)? ty::Variance,
+ user_ty: & $($mutability)? UserTypeProjection<'tcx>,
location: Location) {
self.super_ascribe_user_ty(place, variance, user_ty, location);
}
fn visit_retag(&mut self,
- kind: & $($mutability)* RetagKind,
- place: & $($mutability)* Place<'tcx>,
+ kind: & $($mutability)? RetagKind,
+ place: & $($mutability)? Place<'tcx>,
location: Location) {
self.super_retag(kind, place, location);
}
fn visit_place(&mut self,
- place: & $($mutability)* Place<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
self.super_place(place, context, location);
}
fn visit_static(&mut self,
- static_: & $($mutability)* Static<'tcx>,
+ static_: & $($mutability)? Static<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
self.super_static(static_, context, location);
}
fn visit_projection(&mut self,
- place: & $($mutability)* PlaceProjection<'tcx>,
+ place: & $($mutability)? PlaceProjection<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
self.super_projection(place, context, location);
}
fn visit_projection_elem(&mut self,
- place: & $($mutability)* PlaceElem<'tcx>,
+ place: & $($mutability)? PlaceElem<'tcx>,
location: Location) {
self.super_projection_elem(place, location);
}
@@ -183,36 +183,36 @@
}
fn visit_constant(&mut self,
- constant: & $($mutability)* Constant<'tcx>,
+ constant: & $($mutability)? Constant<'tcx>,
location: Location) {
self.super_constant(constant, location);
}
fn visit_def_id(&mut self,
- def_id: & $($mutability)* DefId,
+ def_id: & $($mutability)? DefId,
_: Location) {
self.super_def_id(def_id);
}
fn visit_span(&mut self,
- span: & $($mutability)* Span) {
+ span: & $($mutability)? Span) {
self.super_span(span);
}
fn visit_source_info(&mut self,
- source_info: & $($mutability)* SourceInfo) {
+ source_info: & $($mutability)? SourceInfo) {
self.super_source_info(source_info);
}
fn visit_ty(&mut self,
- ty: & $($mutability)* Ty<'tcx>,
+ ty: & $($mutability)? Ty<'tcx>,
_: TyContext) {
self.super_ty(ty);
}
fn visit_user_type_projection(
&mut self,
- ty: & $($mutability)* UserTypeProjection<'tcx>,
+ ty: & $($mutability)? UserTypeProjection<'tcx>,
) {
self.super_user_type_projection(ty);
}
@@ -220,55 +220,55 @@
fn visit_user_type_annotation(
&mut self,
index: UserTypeAnnotationIndex,
- ty: & $($mutability)* CanonicalUserTypeAnnotation<'tcx>,
+ ty: & $($mutability)? CanonicalUserTypeAnnotation<'tcx>,
) {
self.super_user_type_annotation(index, ty);
}
fn visit_region(&mut self,
- region: & $($mutability)* ty::Region<'tcx>,
+ region: & $($mutability)? ty::Region<'tcx>,
_: Location) {
self.super_region(region);
}
fn visit_const(&mut self,
- constant: & $($mutability)* &'tcx ty::LazyConst<'tcx>,
+ constant: & $($mutability)? &'tcx ty::LazyConst<'tcx>,
_: Location) {
self.super_const(constant);
}
fn visit_substs(&mut self,
- substs: & $($mutability)* &'tcx Substs<'tcx>,
+ substs: & $($mutability)? &'tcx Substs<'tcx>,
_: Location) {
self.super_substs(substs);
}
fn visit_closure_substs(&mut self,
- substs: & $($mutability)* ClosureSubsts<'tcx>,
+ substs: & $($mutability)? ClosureSubsts<'tcx>,
_: Location) {
self.super_closure_substs(substs);
}
fn visit_generator_substs(&mut self,
- substs: & $($mutability)* GeneratorSubsts<'tcx>,
+ substs: & $($mutability)? GeneratorSubsts<'tcx>,
_: Location) {
self.super_generator_substs(substs);
}
fn visit_local_decl(&mut self,
local: Local,
- local_decl: & $($mutability)* LocalDecl<'tcx>) {
+ local_decl: & $($mutability)? LocalDecl<'tcx>) {
self.super_local_decl(local, local_decl);
}
fn visit_local(&mut self,
- _local: & $($mutability)* Local,
+ _local: & $($mutability)? Local,
_context: PlaceContext<'tcx>,
_location: Location) {
}
fn visit_source_scope(&mut self,
- scope: & $($mutability)* SourceScope) {
+ scope: & $($mutability)? SourceScope) {
self.super_source_scope(scope);
}
@@ -276,8 +276,8 @@
// not meant to be overridden.
fn super_mir(&mut self,
- mir: & $($mutability)* Mir<'tcx>) {
- if let Some(yield_ty) = &$($mutability)* mir.yield_ty {
+ mir: & $($mutability)? Mir<'tcx>) {
+ if let Some(yield_ty) = &$($mutability)? mir.yield_ty {
self.visit_ty(yield_ty, TyContext::YieldTy(SourceInfo {
span: mir.span,
scope: OUTERMOST_SOURCE_SCOPE,
@@ -291,21 +291,21 @@
(mut) => (mir.basic_blocks_mut().iter_enumerated_mut());
() => (mir.basic_blocks().iter_enumerated());
};
- for (bb, data) in basic_blocks!($($mutability)*) {
+ for (bb, data) in basic_blocks!($($mutability)?) {
self.visit_basic_block_data(bb, data);
}
- for scope in &$($mutability)* mir.source_scopes {
+ for scope in &$($mutability)? mir.source_scopes {
self.visit_source_scope_data(scope);
}
- self.visit_ty(&$($mutability)* mir.return_ty(), TyContext::ReturnTy(SourceInfo {
+ self.visit_ty(&$($mutability)? mir.return_ty(), TyContext::ReturnTy(SourceInfo {
span: mir.span,
scope: OUTERMOST_SOURCE_SCOPE,
}));
for local in mir.local_decls.indices() {
- self.visit_local_decl(local, & $($mutability)* mir.local_decls[local]);
+ self.visit_local_decl(local, & $($mutability)? mir.local_decls[local]);
}
macro_rules! type_annotations {
@@ -313,23 +313,23 @@
() => (mir.user_type_annotations.iter_enumerated());
};
- for (index, annotation) in type_annotations!($($mutability)*) {
+ for (index, annotation) in type_annotations!($($mutability)?) {
self.visit_user_type_annotation(
index, annotation
);
}
- self.visit_span(&$($mutability)* mir.span);
+ self.visit_span(&$($mutability)? mir.span);
}
fn super_basic_block_data(&mut self,
block: BasicBlock,
- data: & $($mutability)* BasicBlockData<'tcx>) {
+ data: & $($mutability)? BasicBlockData<'tcx>) {
let BasicBlockData {
- ref $($mutability)* statements,
- ref $($mutability)* terminator,
+ statements,
+ terminator,
is_cleanup: _
- } = *data;
+ } = data;
let mut index = 0;
for statement in statements {
@@ -338,92 +338,83 @@
index += 1;
}
- if let Some(ref $($mutability)* terminator) = *terminator {
+ if let Some(terminator) = terminator {
let location = Location { block: block, statement_index: index };
self.visit_terminator(block, terminator, location);
}
}
- fn super_source_scope_data(&mut self,
- scope_data: & $($mutability)* SourceScopeData) {
+ fn super_source_scope_data(&mut self, scope_data: & $($mutability)? SourceScopeData) {
let SourceScopeData {
- ref $($mutability)* span,
- ref $($mutability)* parent_scope,
- } = *scope_data;
+ span,
+ parent_scope,
+ } = scope_data;
self.visit_span(span);
- if let Some(ref $($mutability)* parent_scope) = *parent_scope {
+ if let Some(parent_scope) = parent_scope {
self.visit_source_scope(parent_scope);
}
}
fn super_statement(&mut self,
block: BasicBlock,
- statement: & $($mutability)* Statement<'tcx>,
+ statement: & $($mutability)? Statement<'tcx>,
location: Location) {
let Statement {
- ref $($mutability)* source_info,
- ref $($mutability)* kind,
- } = *statement;
+ source_info,
+ kind,
+ } = statement;
self.visit_source_info(source_info);
- match *kind {
- StatementKind::Assign(ref $($mutability)* place,
- ref $($mutability)* rvalue) => {
+ match kind {
+ StatementKind::Assign(place, rvalue) => {
self.visit_assign(block, place, rvalue, location);
}
- StatementKind::FakeRead(_, ref $($mutability)* place) => {
+ StatementKind::FakeRead(_, place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
location
);
}
- StatementKind::SetDiscriminant{ ref $($mutability)* place, .. } => {
+ StatementKind::SetDiscriminant { place, .. } => {
self.visit_place(
place,
PlaceContext::MutatingUse(MutatingUseContext::Store),
location
);
}
- StatementKind::StorageLive(ref $($mutability)* local) => {
+ StatementKind::StorageLive(local) => {
self.visit_local(
local,
PlaceContext::NonUse(NonUseContext::StorageLive),
location
);
}
- StatementKind::StorageDead(ref $($mutability)* local) => {
+ StatementKind::StorageDead(local) => {
self.visit_local(
local,
PlaceContext::NonUse(NonUseContext::StorageDead),
location
);
}
- StatementKind::InlineAsm { ref $($mutability)* outputs,
- ref $($mutability)* inputs,
- asm: _ } => {
- for output in & $($mutability)* outputs[..] {
+ StatementKind::InlineAsm { outputs, inputs, asm: _ } => {
+ for output in & $($mutability)? outputs[..] {
self.visit_place(
output,
PlaceContext::MutatingUse(MutatingUseContext::AsmOutput),
location
);
}
- for (span, input) in & $($mutability)* inputs[..] {
+ for (span, input) in & $($mutability)? inputs[..] {
self.visit_span(span);
self.visit_operand(input, location);
}
}
- StatementKind::Retag ( ref $($mutability)* kind,
- ref $($mutability)* place ) => {
+ StatementKind::Retag(kind, place) => {
self.visit_retag(kind, place, location);
}
- StatementKind::AscribeUserType(
- ref $($mutability)* place,
- ref $($mutability)* variance,
- ref $($mutability)* user_ty,
- ) => {
+ StatementKind::AscribeUserType(place, variance, user_ty) => {
self.visit_ascribe_user_ty(place, variance, user_ty, location);
}
StatementKind::Nop => {}
@@ -432,8 +423,8 @@
fn super_assign(&mut self,
_block: BasicBlock,
- place: &$($mutability)* Place<'tcx>,
- rvalue: &$($mutability)* Rvalue<'tcx>,
+ place: &$($mutability)? Place<'tcx>,
+ rvalue: &$($mutability)? Rvalue<'tcx>,
location: Location) {
self.visit_place(
place,
@@ -445,12 +436,9 @@
fn super_terminator(&mut self,
block: BasicBlock,
- terminator: &$($mutability)* Terminator<'tcx>,
+ terminator: &$($mutability)? Terminator<'tcx>,
location: Location) {
- let Terminator {
- ref $($mutability)* source_info,
- ref $($mutability)* kind,
- } = *terminator;
+ let Terminator { source_info, kind } = terminator;
self.visit_source_info(source_info);
self.visit_terminator_kind(block, kind, location);
@@ -458,21 +446,23 @@
fn super_terminator_kind(&mut self,
block: BasicBlock,
- kind: & $($mutability)* TerminatorKind<'tcx>,
+ kind: & $($mutability)? TerminatorKind<'tcx>,
source_location: Location) {
- match *kind {
+ match kind {
TerminatorKind::Goto { target } => {
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
}
- TerminatorKind::SwitchInt { ref $($mutability)* discr,
- ref $($mutability)* switch_ty,
- values: _,
- ref targets } => {
+ TerminatorKind::SwitchInt {
+ discr,
+ switch_ty,
+ values: _,
+ targets
+ } => {
self.visit_operand(discr, source_location);
self.visit_ty(switch_ty, TyContext::Location(source_location));
- for &target in targets {
- self.visit_branch(block, target);
+ for target in targets {
+ self.visit_branch(block, *target);
}
}
@@ -483,113 +473,120 @@
TerminatorKind::Unreachable => {
}
- TerminatorKind::Drop { ref $($mutability)* location,
- target,
- unwind } => {
+ TerminatorKind::Drop {
+ location,
+ target,
+ unwind,
+ } => {
self.visit_place(
location,
PlaceContext::MutatingUse(MutatingUseContext::Drop),
source_location
);
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
unwind.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::DropAndReplace { ref $($mutability)* location,
- ref $($mutability)* value,
- target,
- unwind } => {
+ TerminatorKind::DropAndReplace {
+ location,
+ value,
+ target,
+ unwind,
+ } => {
self.visit_place(
location,
PlaceContext::MutatingUse(MutatingUseContext::Drop),
source_location
);
self.visit_operand(value, source_location);
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
unwind.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::Call { ref $($mutability)* func,
- ref $($mutability)* args,
- ref $($mutability)* destination,
- cleanup,
- from_hir_call: _, } => {
+ TerminatorKind::Call {
+ func,
+ args,
+ destination,
+ cleanup,
+ from_hir_call: _,
+ } => {
self.visit_operand(func, source_location);
for arg in args {
self.visit_operand(arg, source_location);
}
- if let Some((ref $($mutability)* destination, target)) = *destination {
+ if let Some((destination, target)) = destination {
self.visit_place(
destination,
PlaceContext::MutatingUse(MutatingUseContext::Call),
source_location
);
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
}
cleanup.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::Assert { ref $($mutability)* cond,
- expected: _,
- ref $($mutability)* msg,
- target,
- cleanup } => {
+ TerminatorKind::Assert {
+ cond,
+ expected: _,
+ msg,
+ target,
+ cleanup,
+ } => {
self.visit_operand(cond, source_location);
self.visit_assert_message(msg, source_location);
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
cleanup.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::Yield { ref $($mutability)* value,
- resume,
- drop } => {
+ TerminatorKind::Yield {
+ value,
+ resume,
+ drop,
+ } => {
self.visit_operand(value, source_location);
- self.visit_branch(block, resume);
+ self.visit_branch(block, *resume);
drop.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::FalseEdges { real_target, ref imaginary_targets} => {
- self.visit_branch(block, real_target);
+ TerminatorKind::FalseEdges { real_target, imaginary_targets } => {
+ self.visit_branch(block, *real_target);
for target in imaginary_targets {
self.visit_branch(block, *target);
}
}
TerminatorKind::FalseUnwind { real_target, unwind } => {
- self.visit_branch(block, real_target);
+ self.visit_branch(block, *real_target);
if let Some(unwind) = unwind {
- self.visit_branch(block, unwind);
+ self.visit_branch(block, *unwind);
}
}
}
}
fn super_assert_message(&mut self,
- msg: & $($mutability)* AssertMessage<'tcx>,
+ msg: & $($mutability)? AssertMessage<'tcx>,
location: Location) {
use crate::mir::interpret::EvalErrorKind::*;
- if let BoundsCheck {
- ref $($mutability)* len,
- ref $($mutability)* index
- } = *msg {
+ if let BoundsCheck { len, index } = msg {
self.visit_operand(len, location);
self.visit_operand(index, location);
}
}
fn super_rvalue(&mut self,
- rvalue: & $($mutability)* Rvalue<'tcx>,
+ rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
- match *rvalue {
- Rvalue::Use(ref $($mutability)* operand) => {
+ match rvalue {
+ Rvalue::Use(operand) => {
self.visit_operand(operand, location);
}
- Rvalue::Repeat(ref $($mutability)* value, _) => {
+ Rvalue::Repeat(value, _) => {
self.visit_operand(value, location);
}
- Rvalue::Ref(ref $($mutability)* r, bk, ref $($mutability)* path) => {
+ Rvalue::Ref(r, bk, path) => {
self.visit_region(r, location);
let ctx = match bk {
BorrowKind::Shared => PlaceContext::NonMutatingUse(
@@ -607,7 +604,7 @@
self.visit_place(path, ctx, location);
}
- Rvalue::Len(ref $($mutability)* path) => {
+ Rvalue::Len(path) => {
self.visit_place(
path,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
@@ -615,28 +612,22 @@
);
}
- Rvalue::Cast(_cast_kind,
- ref $($mutability)* operand,
- ref $($mutability)* ty) => {
+ Rvalue::Cast(_cast_kind, operand, ty) => {
self.visit_operand(operand, location);
self.visit_ty(ty, TyContext::Location(location));
}
- Rvalue::BinaryOp(_bin_op,
- ref $($mutability)* lhs,
- ref $($mutability)* rhs) |
- Rvalue::CheckedBinaryOp(_bin_op,
- ref $($mutability)* lhs,
- ref $($mutability)* rhs) => {
+ Rvalue::BinaryOp(_bin_op, lhs, rhs)
+ | Rvalue::CheckedBinaryOp(_bin_op, lhs, rhs) => {
self.visit_operand(lhs, location);
self.visit_operand(rhs, location);
}
- Rvalue::UnaryOp(_un_op, ref $($mutability)* op) => {
+ Rvalue::UnaryOp(_un_op, op) => {
self.visit_operand(op, location);
}
- Rvalue::Discriminant(ref $($mutability)* place) => {
+ Rvalue::Discriminant(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
@@ -644,34 +635,39 @@
);
}
- Rvalue::NullaryOp(_op, ref $($mutability)* ty) => {
+ Rvalue::NullaryOp(_op, ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
- Rvalue::Aggregate(ref $($mutability)* kind,
- ref $($mutability)* operands) => {
- let kind = &$($mutability)* **kind;
- match *kind {
- AggregateKind::Array(ref $($mutability)* ty) => {
+ Rvalue::Aggregate(kind, operands) => {
+ let kind = &$($mutability)? **kind;
+ match kind {
+ AggregateKind::Array(ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
AggregateKind::Tuple => {
}
- AggregateKind::Adt(_adt_def,
- _variant_index,
- ref $($mutability)* substs,
- _user_substs,
- _active_field_index) => {
+ AggregateKind::Adt(
+ _adt_def,
+ _variant_index,
+ substs,
+ _user_substs,
+ _active_field_index
+ ) => {
self.visit_substs(substs, location);
}
- AggregateKind::Closure(ref $($mutability)* def_id,
- ref $($mutability)* closure_substs) => {
+ AggregateKind::Closure(
+ def_id,
+ closure_substs
+ ) => {
self.visit_def_id(def_id, location);
self.visit_closure_substs(closure_substs, location);
}
- AggregateKind::Generator(ref $($mutability)* def_id,
- ref $($mutability)* generator_substs,
- _movability) => {
+ AggregateKind::Generator(
+ def_id,
+ generator_substs,
+ _movability,
+ ) => {
self.visit_def_id(def_id, location);
self.visit_generator_substs(generator_substs, location);
}
@@ -685,33 +681,33 @@
}
fn super_operand(&mut self,
- operand: & $($mutability)* Operand<'tcx>,
+ operand: & $($mutability)? Operand<'tcx>,
location: Location) {
- match *operand {
- Operand::Copy(ref $($mutability)* place) => {
+ match operand {
+ Operand::Copy(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
location
);
}
- Operand::Move(ref $($mutability)* place) => {
+ Operand::Move(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Move),
location
);
}
- Operand::Constant(ref $($mutability)* constant) => {
+ Operand::Constant(constant) => {
self.visit_constant(constant, location);
}
}
}
fn super_ascribe_user_ty(&mut self,
- place: & $($mutability)* Place<'tcx>,
- _variance: & $($mutability)* ty::Variance,
- user_ty: & $($mutability)* UserTypeProjection<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
+ _variance: & $($mutability)? ty::Variance,
+ user_ty: & $($mutability)? UserTypeProjection<'tcx>,
location: Location) {
self.visit_place(
place,
@@ -722,8 +718,8 @@
}
fn super_retag(&mut self,
- _kind: & $($mutability)* RetagKind,
- place: & $($mutability)* Place<'tcx>,
+ _kind: & $($mutability)? RetagKind,
+ place: & $($mutability)? Place<'tcx>,
location: Location) {
self.visit_place(
place,
@@ -733,45 +729,39 @@
}
fn super_place(&mut self,
- place: & $($mutability)* Place<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
- match *place {
- Place::Local(ref $($mutability)* local) => {
+ match place {
+ Place::Local(local) => {
self.visit_local(local, context, location);
}
- Place::Static(ref $($mutability)* static_) => {
+ Place::Static(static_) => {
self.visit_static(static_, context, location);
}
- Place::Promoted(ref $($mutability)* promoted) => {
- self.visit_ty(& $($mutability)* promoted.1, TyContext::Location(location));
+ Place::Promoted(promoted) => {
+ self.visit_ty(& $($mutability)? promoted.1, TyContext::Location(location));
},
- Place::Projection(ref $($mutability)* proj) => {
+ Place::Projection(proj) => {
self.visit_projection(proj, context, location);
}
}
}
fn super_static(&mut self,
- static_: & $($mutability)* Static<'tcx>,
+ static_: & $($mutability)? Static<'tcx>,
_context: PlaceContext<'tcx>,
location: Location) {
- let Static {
- ref $($mutability)* def_id,
- ref $($mutability)* ty,
- } = *static_;
+ let Static { def_id, ty } = static_;
self.visit_def_id(def_id, location);
self.visit_ty(ty, TyContext::Location(location));
}
fn super_projection(&mut self,
- proj: & $($mutability)* PlaceProjection<'tcx>,
+ proj: & $($mutability)? PlaceProjection<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
- let Projection {
- ref $($mutability)* base,
- ref $($mutability)* elem,
- } = *proj;
+ let Projection { base, elem } = proj;
let context = if context.is_mutating_use() {
PlaceContext::MutatingUse(MutatingUseContext::Projection)
} else {
@@ -782,17 +772,17 @@
}
fn super_projection_elem(&mut self,
- proj: & $($mutability)* PlaceElem<'tcx>,
+ proj: & $($mutability)? PlaceElem<'tcx>,
location: Location) {
- match *proj {
+ match proj {
ProjectionElem::Deref => {
}
ProjectionElem::Subslice { from: _, to: _ } => {
}
- ProjectionElem::Field(_field, ref $($mutability)* ty) => {
+ ProjectionElem::Field(_field, ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
- ProjectionElem::Index(ref $($mutability)* local) => {
+ ProjectionElem::Index(local) => {
self.visit_local(
local,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
@@ -810,24 +800,24 @@
fn super_local_decl(&mut self,
local: Local,
- local_decl: & $($mutability)* LocalDecl<'tcx>) {
+ local_decl: & $($mutability)? LocalDecl<'tcx>) {
let LocalDecl {
mutability: _,
- ref $($mutability)* ty,
- ref $($mutability)* user_ty,
+ ty,
+ user_ty,
name: _,
- ref $($mutability)* source_info,
- ref $($mutability)* visibility_scope,
+ source_info,
+ visibility_scope,
internal: _,
is_user_variable: _,
is_block_tail: _,
- } = *local_decl;
+ } = local_decl;
self.visit_ty(ty, TyContext::LocalDecl {
local,
source_info: *source_info,
});
- for (user_ty, _) in & $($mutability)* user_ty.contents {
+ for (user_ty, _) in & $($mutability)? user_ty.contents {
self.visit_user_type_projection(user_ty);
}
self.visit_source_info(source_info);
@@ -835,7 +825,7 @@
}
fn super_source_scope(&mut self,
- _scope: & $($mutability)* SourceScope) {
+ _scope: & $($mutability)? SourceScope) {
}
fn super_branch(&mut self,
@@ -844,14 +834,14 @@
}
fn super_constant(&mut self,
- constant: & $($mutability)* Constant<'tcx>,
+ constant: & $($mutability)? Constant<'tcx>,
location: Location) {
let Constant {
- ref $($mutability)* span,
- ref $($mutability)* ty,
- ref $($mutability)* user_ty,
- ref $($mutability)* literal,
- } = *constant;
+ span,
+ ty,
+ user_ty,
+ literal,
+ } = constant;
self.visit_span(span);
self.visit_ty(ty, TyContext::Location(location));
@@ -859,17 +849,17 @@
self.visit_const(literal, location);
}
- fn super_def_id(&mut self, _def_id: & $($mutability)* DefId) {
+ fn super_def_id(&mut self, _def_id: & $($mutability)? DefId) {
}
- fn super_span(&mut self, _span: & $($mutability)* Span) {
+ fn super_span(&mut self, _span: & $($mutability)? Span) {
}
- fn super_source_info(&mut self, source_info: & $($mutability)* SourceInfo) {
+ fn super_source_info(&mut self, source_info: & $($mutability)? SourceInfo) {
let SourceInfo {
- ref $($mutability)* span,
- ref $($mutability)* scope,
- } = *source_info;
+ span,
+ scope,
+ } = source_info;
self.visit_span(span);
self.visit_source_scope(scope);
@@ -877,49 +867,49 @@
fn super_user_type_projection(
&mut self,
- _ty: & $($mutability)* UserTypeProjection<'tcx>,
+ _ty: & $($mutability)? UserTypeProjection<'tcx>,
) {
}
fn super_user_type_annotation(
&mut self,
_index: UserTypeAnnotationIndex,
- ty: & $($mutability)* CanonicalUserTypeAnnotation<'tcx>,
+ ty: & $($mutability)? CanonicalUserTypeAnnotation<'tcx>,
) {
- self.visit_span(& $($mutability)* ty.span);
- self.visit_ty(& $($mutability)* ty.inferred_ty, TyContext::UserTy(ty.span));
+ self.visit_span(& $($mutability)? ty.span);
+ self.visit_ty(& $($mutability)? ty.inferred_ty, TyContext::UserTy(ty.span));
}
- fn super_ty(&mut self, _ty: & $($mutability)* Ty<'tcx>) {
+ fn super_ty(&mut self, _ty: & $($mutability)? Ty<'tcx>) {
}
- fn super_region(&mut self, _region: & $($mutability)* ty::Region<'tcx>) {
+ fn super_region(&mut self, _region: & $($mutability)? ty::Region<'tcx>) {
}
- fn super_const(&mut self, _const: & $($mutability)* &'tcx ty::LazyConst<'tcx>) {
+ fn super_const(&mut self, _const: & $($mutability)? &'tcx ty::LazyConst<'tcx>) {
}
- fn super_substs(&mut self, _substs: & $($mutability)* &'tcx Substs<'tcx>) {
+ fn super_substs(&mut self, _substs: & $($mutability)? &'tcx Substs<'tcx>) {
}
fn super_generator_substs(&mut self,
- _substs: & $($mutability)* GeneratorSubsts<'tcx>) {
+ _substs: & $($mutability)? GeneratorSubsts<'tcx>) {
}
fn super_closure_substs(&mut self,
- _substs: & $($mutability)* ClosureSubsts<'tcx>) {
+ _substs: & $($mutability)? ClosureSubsts<'tcx>) {
}
// Convenience methods
- fn visit_location(&mut self, mir: & $($mutability)* Mir<'tcx>, location: Location) {
- let basic_block = & $($mutability)* mir[location.block];
+ fn visit_location(&mut self, mir: & $($mutability)? Mir<'tcx>, location: Location) {
+ let basic_block = & $($mutability)? mir[location.block];
if basic_block.statements.len() == location.statement_index {
- if let Some(ref $($mutability)* terminator) = basic_block.terminator {
+ if let Some(ref $($mutability)? terminator) = basic_block.terminator {
self.visit_terminator(location.block, terminator, location)
}
} else {
- let statement = & $($mutability)*
+ let statement = & $($mutability)?
basic_block.statements[location.statement_index];
self.visit_statement(location.block, statement, location)
}
diff --git a/src/librustc_allocator/lib.rs b/src/librustc_allocator/lib.rs
index 16b9ccf..9d6e728 100644
--- a/src/librustc_allocator/lib.rs
+++ b/src/librustc_allocator/lib.rs
@@ -1,3 +1,4 @@
+#![feature(nll)]
#![feature(rustc_private)]
#![deny(rust_2018_idioms)]
diff --git a/src/librustc_apfloat/lib.rs b/src/librustc_apfloat/lib.rs
index f79d448..6653df8 100644
--- a/src/librustc_apfloat/lib.rs
+++ b/src/librustc_apfloat/lib.rs
@@ -34,6 +34,7 @@
#![forbid(unsafe_code)]
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(try_from)]
// See librustc_cratesio_shim/Cargo.toml for a comment explaining this.
#[allow(unused_extern_crates)]
diff --git a/src/librustc_asan/lib.rs b/src/librustc_asan/lib.rs
index 568bb54..3bdb86d 100644
--- a/src/librustc_asan/lib.rs
+++ b/src/librustc_asan/lib.rs
@@ -1,4 +1,5 @@
#![sanitizer_runtime]
+#![feature(nll)]
#![feature(sanitizer_runtime)]
#![feature(staged_api)]
#![no_std]
diff --git a/src/librustc_codegen_ssa/Cargo.toml b/src/librustc_codegen_ssa/Cargo.toml
index 5099449..0aba435 100644
--- a/src/librustc_codegen_ssa/Cargo.toml
+++ b/src/librustc_codegen_ssa/Cargo.toml
@@ -2,6 +2,7 @@
authors = ["The Rust Project Developers"]
name = "rustc_codegen_ssa"
version = "0.0.0"
+edition = "2018"
[lib]
name = "rustc_codegen_ssa"
diff --git a/src/librustc_codegen_ssa/back/link.rs b/src/librustc_codegen_ssa/back/link.rs
index 2a5ecf9..7f1aeba 100644
--- a/src/librustc_codegen_ssa/back/link.rs
+++ b/src/librustc_codegen_ssa/back/link.rs
@@ -9,7 +9,7 @@
use rustc::hir::def_id::CrateNum;
use super::command::Command;
-use CrateInfo;
+use crate::CrateInfo;
use cc::windows_registry;
use std::fs;
diff --git a/src/librustc_codegen_ssa/back/lto.rs b/src/librustc_codegen_ssa/back/lto.rs
index f0fb115..7f0eba7 100644
--- a/src/librustc_codegen_ssa/back/lto.rs
+++ b/src/librustc_codegen_ssa/back/lto.rs
@@ -1,6 +1,6 @@
use super::write::CodegenContext;
-use traits::*;
-use ModuleCodegen;
+use crate::traits::*;
+use crate::ModuleCodegen;
use rustc::util::time_graph::Timeline;
use rustc_errors::FatalError;
diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs
index 67d4d40..eeb191b 100644
--- a/src/librustc_codegen_ssa/back/write.rs
+++ b/src/librustc_codegen_ssa/back/write.rs
@@ -1,12 +1,12 @@
-use {ModuleCodegen, ModuleKind, CachedModuleCodegen, CompiledModule, CrateInfo, CodegenResults,
- RLIB_BYTECODE_EXTENSION};
+use crate::{ModuleCodegen, ModuleKind, CachedModuleCodegen, CompiledModule, CrateInfo,
+ CodegenResults, RLIB_BYTECODE_EXTENSION};
use super::linker::LinkerInfo;
use super::lto::{self, SerializedModule};
use super::link::{self, remove, get_linker};
use super::command::Command;
use super::symbol_export::ExportedSymbols;
-use memmap;
+use crate::traits::*;
use rustc_incremental::{copy_cgu_workproducts_to_incr_comp_cache_dir,
in_incr_comp_dir, in_incr_comp_dir_sess};
use rustc::dep_graph::{WorkProduct, WorkProductId, WorkProductFileKind};
@@ -16,7 +16,6 @@
use rustc::session::Session;
use rustc::util::nodemap::FxHashMap;
use rustc::util::time_graph::{self, TimeGraph, Timeline};
-use traits::*;
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc::ty::TyCtxt;
use rustc::util::common::{time_depth, set_time_depth, print_time_passes_entry};
diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs
index 84e55ce..988e3bb 100644
--- a/src/librustc_codegen_ssa/base.rs
+++ b/src/librustc_codegen_ssa/base.rs
@@ -13,7 +13,7 @@
//! but one llvm::Type corresponds to many `Ty`s; for instance, tup(int, int,
//! int) and rec(x=int, y=int, z=int) will have the same llvm::Type.
-use {ModuleCodegen, ModuleKind, CachedModuleCodegen};
+use crate::{ModuleCodegen, ModuleKind, CachedModuleCodegen};
use rustc::dep_graph::cgu_reuse_tracker::CguReuse;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
@@ -28,26 +28,26 @@
use rustc::util::profiling::ProfileCategory;
use rustc::session::config::{self, EntryFnType, Lto};
use rustc::session::Session;
-use mir::place::PlaceRef;
-use back::write::{OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm,
- submit_post_lto_module_to_llvm};
-use {MemFlags, CrateInfo};
-use callee;
use rustc_mir::monomorphize::item::DefPathBasedNames;
-use common::{RealPredicate, TypeKind, IntPredicate};
-use meth;
-use mir;
use rustc::util::time_graph;
use rustc_mir::monomorphize::Instance;
use rustc_mir::monomorphize::partitioning::{CodegenUnit, CodegenUnitExt};
-use mono_item::MonoItem;
use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::sync::Lrc;
use rustc_codegen_utils::{symbol_names_test, check_for_rustc_errors_attr};
use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
+use crate::mir::place::PlaceRef;
+use crate::back::write::{OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm,
+ submit_post_lto_module_to_llvm};
+use crate::{MemFlags, CrateInfo};
+use crate::callee;
+use crate::common::{RealPredicate, TypeKind, IntPredicate};
+use crate::meth;
+use crate::mir;
+use crate::mono_item::MonoItem;
-use traits::*;
+use crate::traits::*;
use std::any::Any;
use std::cmp;
@@ -58,7 +58,7 @@
use syntax::attr;
use rustc::hir;
-use mir::operand::OperandValue;
+use crate::mir::operand::OperandValue;
use std::marker::PhantomData;
diff --git a/src/librustc_codegen_ssa/callee.rs b/src/librustc_codegen_ssa/callee.rs
index aa13e52..3665d45 100644
--- a/src/librustc_codegen_ssa/callee.rs
+++ b/src/librustc_codegen_ssa/callee.rs
@@ -1,4 +1,4 @@
-use traits::*;
+use crate::traits::*;
use rustc::ty;
use rustc::ty::subst::Substs;
use rustc::hir::def_id::DefId;
diff --git a/src/librustc_codegen_ssa/common.rs b/src/librustc_codegen_ssa/common.rs
index cfb5d24..1b87f16 100644
--- a/src/librustc_codegen_ssa/common.rs
+++ b/src/librustc_codegen_ssa/common.rs
@@ -5,11 +5,11 @@
use rustc::hir::def_id::DefId;
use rustc::middle::lang_items::LangItem;
-use base;
-use traits::*;
+use crate::base;
+use crate::traits::*;
use rustc::hir;
-use traits::BuilderMethods;
+use crate::traits::BuilderMethods;
pub fn type_needs_drop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
ty.needs_drop(tcx, ty::ParamEnv::reveal_all())
@@ -123,7 +123,7 @@
mod temp_stable_hash_impls {
use rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher,
HashStable};
- use ModuleCodegen;
+ use crate::ModuleCodegen;
impl<HCX, M> HashStable<HCX> for ModuleCodegen<M> {
fn hash_stable<W: StableHasherResult>(&self,
diff --git a/src/librustc_codegen_ssa/glue.rs b/src/librustc_codegen_ssa/glue.rs
index ed63e1e..e2b49de 100644
--- a/src/librustc_codegen_ssa/glue.rs
+++ b/src/librustc_codegen_ssa/glue.rs
@@ -2,12 +2,10 @@
//
// Code relating to drop glue.
-use std;
-
-use common::IntPredicate;
-use meth;
use rustc::ty::{self, Ty};
-use traits::*;
+use crate::common::IntPredicate;
+use crate::meth;
+use crate::traits::*;
pub fn size_and_align_of_dst<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
bx: &mut Bx,
diff --git a/src/librustc_codegen_ssa/lib.rs b/src/librustc_codegen_ssa/lib.rs
index 58b3f04..ad894bf 100644
--- a/src/librustc_codegen_ssa/lib.rs
+++ b/src/librustc_codegen_ssa/lib.rs
@@ -10,6 +10,9 @@
#![feature(nll)]
#![allow(unused_attributes)]
#![allow(dead_code)]
+#![deny(rust_2018_idioms)]
+#![allow(explicit_outlives_requirements)]
+#![allow(elided_lifetimes_in_paths)]
#![recursion_limit="256"]
@@ -17,27 +20,9 @@
//! The backend-agnostic functions of this crate use functions defined in various traits that
//! have to be implemented by each backends.
-#[macro_use] extern crate bitflags;
#[macro_use] extern crate log;
-extern crate rustc_apfloat;
-#[macro_use] extern crate rustc;
-extern crate rustc_target;
-extern crate rustc_mir;
+#[macro_use] extern crate rustc;
#[macro_use] extern crate syntax;
-extern crate syntax_pos;
-extern crate rustc_incremental;
-extern crate rustc_codegen_utils;
-extern crate rustc_data_structures;
-extern crate rustc_allocator;
-extern crate rustc_fs_util;
-extern crate serialize;
-extern crate rustc_errors;
-extern crate rustc_demangle;
-extern crate cc;
-extern crate libc;
-extern crate jobserver;
-extern crate memmap;
-extern crate num_cpus;
use std::path::PathBuf;
use rustc::dep_graph::WorkProduct;
@@ -133,7 +118,7 @@
Allocator,
}
-bitflags! {
+bitflags::bitflags! {
pub struct MemFlags: u8 {
const VOLATILE = 1 << 0;
const NONTEMPORAL = 1 << 1;
diff --git a/src/librustc_codegen_ssa/meth.rs b/src/librustc_codegen_ssa/meth.rs
index 98ad261..49f3c87 100644
--- a/src/librustc_codegen_ssa/meth.rs
+++ b/src/librustc_codegen_ssa/meth.rs
@@ -1,8 +1,8 @@
use rustc_target::abi::call::FnType;
-use callee;
use rustc_mir::monomorphize;
-use traits::*;
+use crate::callee;
+use crate::traits::*;
use rustc::ty::{self, Ty};
diff --git a/src/librustc_codegen_ssa/mir/analyze.rs b/src/librustc_codegen_ssa/mir/analyze.rs
index f3475d1..9fe2e58 100644
--- a/src/librustc_codegen_ssa/mir/analyze.rs
+++ b/src/librustc_codegen_ssa/mir/analyze.rs
@@ -10,7 +10,7 @@
use rustc::ty;
use rustc::ty::layout::{LayoutOf, HasTyCtxt};
use super::FunctionCx;
-use traits::*;
+use crate::traits::*;
pub fn non_ssa_locals<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
fx: &FunctionCx<'a, 'tcx, Bx>
diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs
index aa82c85..af510d4 100644
--- a/src/librustc_codegen_ssa/mir/block.rs
+++ b/src/librustc_codegen_ssa/mir/block.rs
@@ -5,13 +5,13 @@
use rustc::mir::interpret::EvalErrorKind;
use rustc_target::abi::call::{ArgType, FnType, PassMode};
use rustc_target::spec::abi::Abi;
-use base;
-use MemFlags;
-use common::{self, IntPredicate};
-use meth;
use rustc_mir::monomorphize;
+use crate::base;
+use crate::MemFlags;
+use crate::common::{self, IntPredicate};
+use crate::meth;
-use traits::*;
+use crate::traits::*;
use syntax::symbol::Symbol;
use syntax_pos::Pos;
diff --git a/src/librustc_codegen_ssa/mir/constant.rs b/src/librustc_codegen_ssa/mir/constant.rs
index 56d4342..6bc69ef 100644
--- a/src/librustc_codegen_ssa/mir/constant.rs
+++ b/src/librustc_codegen_ssa/mir/constant.rs
@@ -6,7 +6,7 @@
use rustc::ty::{self, Ty};
use rustc::ty::layout;
use syntax::source_map::Span;
-use traits::*;
+use crate::traits::*;
use super::FunctionCx;
diff --git a/src/librustc_codegen_ssa/mir/mod.rs b/src/librustc_codegen_ssa/mir/mod.rs
index c7e2131..2e2cb3d 100644
--- a/src/librustc_codegen_ssa/mir/mod.rs
+++ b/src/librustc_codegen_ssa/mir/mod.rs
@@ -4,11 +4,11 @@
use rustc::mir::{self, Mir};
use rustc::ty::subst::Substs;
use rustc::session::config::DebugInfo;
-use base;
-use debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext};
use rustc_mir::monomorphize::Instance;
use rustc_target::abi::call::{FnType, PassMode};
-use traits::*;
+use crate::base;
+use crate::debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext};
+use crate::traits::*;
use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span};
use syntax::symbol::keywords;
diff --git a/src/librustc_codegen_ssa/mir/operand.rs b/src/librustc_codegen_ssa/mir/operand.rs
index 8aad4c1..2c6d968 100644
--- a/src/librustc_codegen_ssa/mir/operand.rs
+++ b/src/librustc_codegen_ssa/mir/operand.rs
@@ -3,11 +3,11 @@
use rustc::ty;
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
-use base;
-use MemFlags;
-use glue;
+use crate::base;
+use crate::MemFlags;
+use crate::glue;
-use traits::*;
+use crate::traits::*;
use std::fmt;
diff --git a/src/librustc_codegen_ssa/mir/place.rs b/src/librustc_codegen_ssa/mir/place.rs
index 596f97a..ffc774c 100644
--- a/src/librustc_codegen_ssa/mir/place.rs
+++ b/src/librustc_codegen_ssa/mir/place.rs
@@ -2,11 +2,11 @@
use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt};
use rustc::mir;
use rustc::mir::tcx::PlaceTy;
-use MemFlags;
-use common::IntPredicate;
-use glue;
+use crate::MemFlags;
+use crate::common::IntPredicate;
+use crate::glue;
-use traits::*;
+use crate::traits::*;
use super::{FunctionCx, LocalRef};
use super::operand::OperandValue;
diff --git a/src/librustc_codegen_ssa/mir/rvalue.rs b/src/librustc_codegen_ssa/mir/rvalue.rs
index 9ca5414..25a7754 100644
--- a/src/librustc_codegen_ssa/mir/rvalue.rs
+++ b/src/librustc_codegen_ssa/mir/rvalue.rs
@@ -6,13 +6,13 @@
use rustc_apfloat::{ieee, Float, Status, Round};
use std::{u128, i128};
-use base;
-use MemFlags;
-use callee;
-use common::{self, RealPredicate, IntPredicate};
+use crate::base;
+use crate::MemFlags;
+use crate::callee;
+use crate::common::{self, RealPredicate, IntPredicate};
use rustc_mir::monomorphize;
-use traits::*;
+use crate::traits::*;
use super::{FunctionCx, LocalRef};
use super::operand::{OperandRef, OperandValue};
diff --git a/src/librustc_codegen_ssa/mir/statement.rs b/src/librustc_codegen_ssa/mir/statement.rs
index 9561a57..a1bd919 100644
--- a/src/librustc_codegen_ssa/mir/statement.rs
+++ b/src/librustc_codegen_ssa/mir/statement.rs
@@ -1,10 +1,10 @@
use rustc::mir;
-use traits::BuilderMethods;
+use crate::traits::BuilderMethods;
use super::FunctionCx;
use super::LocalRef;
use super::OperandValue;
-use traits::*;
+use crate::traits::*;
impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
pub fn codegen_statement(
diff --git a/src/librustc_codegen_ssa/mono_item.rs b/src/librustc_codegen_ssa/mono_item.rs
index 8488ab2..bfb6a91 100644
--- a/src/librustc_codegen_ssa/mono_item.rs
+++ b/src/librustc_codegen_ssa/mono_item.rs
@@ -1,10 +1,10 @@
-use base;
use rustc::hir;
use rustc::hir::def::Def;
use rustc::mir::mono::{Linkage, Visibility};
use rustc::ty::layout::HasTyCtxt;
use std::fmt;
-use traits::*;
+use crate::base;
+use crate::traits::*;
pub use rustc::mir::mono::MonoItem;
diff --git a/src/librustc_codegen_ssa/traits/asm.rs b/src/librustc_codegen_ssa/traits/asm.rs
index 7fe1692..a95bf3a 100644
--- a/src/librustc_codegen_ssa/traits/asm.rs
+++ b/src/librustc_codegen_ssa/traits/asm.rs
@@ -1,5 +1,5 @@
use super::BackendTypes;
-use mir::place::PlaceRef;
+use crate::mir::place::PlaceRef;
use rustc::hir::{GlobalAsm, InlineAsm};
pub trait AsmBuilderMethods<'tcx>: BackendTypes {
diff --git a/src/librustc_codegen_ssa/traits/builder.rs b/src/librustc_codegen_ssa/traits/builder.rs
index bc66087..bda0f3d 100644
--- a/src/librustc_codegen_ssa/traits/builder.rs
+++ b/src/librustc_codegen_ssa/traits/builder.rs
@@ -4,13 +4,14 @@
use super::intrinsic::IntrinsicCallMethods;
use super::type_::ArgTypeMethods;
use super::{HasCodegen, StaticBuilderMethods};
-use common::{AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope};
-use mir::operand::OperandRef;
-use mir::place::PlaceRef;
+use crate::common::{AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate,
+ SynchronizationScope};
+use crate::mir::operand::OperandRef;
+use crate::mir::place::PlaceRef;
+use crate::MemFlags;
use rustc::ty::Ty;
use rustc::ty::layout::{Align, Size};
use std::ffi::CStr;
-use MemFlags;
use std::borrow::Cow;
use std::ops::Range;
diff --git a/src/librustc_codegen_ssa/traits/consts.rs b/src/librustc_codegen_ssa/traits/consts.rs
index 482fb67..319f4b4 100644
--- a/src/librustc_codegen_ssa/traits/consts.rs
+++ b/src/librustc_codegen_ssa/traits/consts.rs
@@ -1,5 +1,5 @@
use super::BackendTypes;
-use mir::place::PlaceRef;
+use crate::mir::place::PlaceRef;
use rustc::mir::interpret::Allocation;
use rustc::mir::interpret::Scalar;
use rustc::ty::layout;
diff --git a/src/librustc_codegen_ssa/traits/debuginfo.rs b/src/librustc_codegen_ssa/traits/debuginfo.rs
index 4163faa..0e606e7 100644
--- a/src/librustc_codegen_ssa/traits/debuginfo.rs
+++ b/src/librustc_codegen_ssa/traits/debuginfo.rs
@@ -1,5 +1,5 @@
use super::BackendTypes;
-use debuginfo::{FunctionDebugContext, MirDebugScope, VariableAccess, VariableKind};
+use crate::debuginfo::{FunctionDebugContext, MirDebugScope, VariableAccess, VariableKind};
use rustc::hir::def_id::CrateNum;
use rustc::mir;
use rustc::ty::{self, Ty};
diff --git a/src/librustc_codegen_ssa/traits/intrinsic.rs b/src/librustc_codegen_ssa/traits/intrinsic.rs
index a2d6b05..3cd0c39 100644
--- a/src/librustc_codegen_ssa/traits/intrinsic.rs
+++ b/src/librustc_codegen_ssa/traits/intrinsic.rs
@@ -1,5 +1,5 @@
use super::BackendTypes;
-use mir::operand::OperandRef;
+use crate::mir::operand::OperandRef;
use rustc::ty::Ty;
use rustc_target::abi::call::FnType;
use syntax_pos::Span;
diff --git a/src/librustc_codegen_ssa/traits/type_.rs b/src/librustc_codegen_ssa/traits/type_.rs
index 2ec0c8e..122aea0 100644
--- a/src/librustc_codegen_ssa/traits/type_.rs
+++ b/src/librustc_codegen_ssa/traits/type_.rs
@@ -1,8 +1,8 @@
use super::misc::MiscMethods;
use super::Backend;
use super::HasCodegen;
-use common::{self, TypeKind};
-use mir::place::PlaceRef;
+use crate::common::{self, TypeKind};
+use crate::mir::place::PlaceRef;
use rustc::ty::layout::{self, Align, Size, TyLayout};
use rustc::ty::{self, Ty};
use rustc::util::nodemap::FxHashMap;
diff --git a/src/librustc_codegen_ssa/traits/write.rs b/src/librustc_codegen_ssa/traits/write.rs
index cea89a7..e8ef815 100644
--- a/src/librustc_codegen_ssa/traits/write.rs
+++ b/src/librustc_codegen_ssa/traits/write.rs
@@ -1,6 +1,6 @@
-use back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
-use back::write::{CodegenContext, ModuleConfig};
-use {CompiledModule, ModuleCodegen};
+use crate::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
+use crate::back::write::{CodegenContext, ModuleConfig};
+use crate::{CompiledModule, ModuleCodegen};
use rustc::dep_graph::WorkProduct;
use rustc::util::time_graph::Timeline;
diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs
index ea530fa..0fc7b59 100644
--- a/src/librustc_errors/lib.rs
+++ b/src/librustc_errors/lib.rs
@@ -4,6 +4,7 @@
#![allow(unused_attributes)]
#![feature(range_contains)]
#![cfg_attr(unix, feature(libc))]
+#![feature(nll)]
#![feature(optin_builtin_traits)]
#![deny(rust_2018_idioms)]
diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs
index 3fcb20a..292ce8b 100644
--- a/src/librustc_llvm/lib.rs
+++ b/src/librustc_llvm/lib.rs
@@ -1,4 +1,5 @@
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(static_nobundle)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
diff --git a/src/librustc_lsan/lib.rs b/src/librustc_lsan/lib.rs
index 568bb54..3bdb86d 100644
--- a/src/librustc_lsan/lib.rs
+++ b/src/librustc_lsan/lib.rs
@@ -1,4 +1,5 @@
#![sanitizer_runtime]
+#![feature(nll)]
#![feature(sanitizer_runtime)]
#![feature(staged_api)]
#![no_std]
diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs
index d68ab97..4699f4c 100644
--- a/src/librustc_metadata/encoder.rs
+++ b/src/librustc_metadata/encoder.rs
@@ -674,7 +674,7 @@
let def_id = field.did;
debug!("IsolatedEncoder::encode_field({:?})", def_id);
- let variant_id = tcx.hir().as_local_node_id(variant.did).unwrap();
+ let variant_id = tcx.hir().as_local_hir_id(variant.did).unwrap();
let variant_data = tcx.hir().expect_variant_data(variant_id);
Entry {
diff --git a/src/librustc_mir/borrow_check/error_reporting.rs b/src/librustc_mir/borrow_check/error_reporting.rs
index afb2696..5a8d754 100644
--- a/src/librustc_mir/borrow_check/error_reporting.rs
+++ b/src/librustc_mir/borrow_check/error_reporting.rs
@@ -833,13 +833,13 @@
format!("`{}` would have to be valid for `{}`...", name, region_name),
);
- if let Some(fn_node_id) = self.infcx.tcx.hir().as_local_node_id(self.mir_def_id) {
+ if let Some(fn_hir_id) = self.infcx.tcx.hir().as_local_hir_id(self.mir_def_id) {
err.span_label(
drop_span,
format!(
"...but `{}` will be dropped here, when the function `{}` returns",
name,
- self.infcx.tcx.hir().name(fn_node_id),
+ self.infcx.tcx.hir().name_by_hir_id(fn_hir_id),
),
);
diff --git a/src/librustc_mir/borrow_check/move_errors.rs b/src/librustc_mir/borrow_check/move_errors.rs
index f7d4692..2a5433d 100644
--- a/src/librustc_mir/borrow_check/move_errors.rs
+++ b/src/librustc_mir/borrow_check/move_errors.rs
@@ -308,9 +308,8 @@
let upvar_decl = &self.mir.upvar_decls[field.index()];
let upvar_hir_id =
upvar_decl.var_hir_id.assert_crate_local();
- let upvar_node_id =
- self.infcx.tcx.hir().hir_to_node_id(upvar_hir_id);
- let upvar_span = self.infcx.tcx.hir().span(upvar_node_id);
+ let upvar_span = self.infcx.tcx.hir().span_by_hir_id(
+ upvar_hir_id);
diag.span_label(upvar_span, "captured outer variable");
break;
}
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
index 2c4f359..3bb22d3 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs
@@ -10,7 +10,7 @@
use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt};
use rustc::util::ppaux::RegionHighlightMode;
use rustc_errors::DiagnosticBuilder;
-use syntax::ast::{Name, DUMMY_NODE_ID};
+use syntax::ast::Name;
use syntax::symbol::keywords;
use syntax_pos::Span;
use syntax_pos::symbol::InternedString;
@@ -293,9 +293,9 @@
name: &InternedString,
) -> Span {
let scope = error_region.free_region_binding_scope(tcx);
- let node = tcx.hir().as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID);
+ let node = tcx.hir().as_local_hir_id(scope).unwrap_or(hir::DUMMY_HIR_ID);
- let span = tcx.sess.source_map().def_span(tcx.hir().span(node));
+ let span = tcx.sess.source_map().def_span(tcx.hir().span_by_hir_id(node));
if let Some(param) = tcx.hir()
.get_generics(scope)
.and_then(|generics| generics.get_named(name))
@@ -681,10 +681,13 @@
let (return_span, mir_description) = match tcx.hir().get(mir_node_id) {
hir::Node::Expr(hir::Expr {
- node: hir::ExprKind::Closure(_, _, _, span, gen_move),
+ node: hir::ExprKind::Closure(_, return_ty, _, span, gen_move),
..
}) => (
- tcx.sess.source_map().end_point(*span),
+ match return_ty.output {
+ hir::FunctionRetTy::DefaultReturn(_) => tcx.sess.source_map().end_point(*span),
+ hir::FunctionRetTy::Return(_) => return_ty.output.span(),
+ },
if gen_move.is_some() {
" of generator"
} else {
diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
index bd7b882..f6bbaf2 100644
--- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
+++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/var_name.rs
@@ -71,11 +71,10 @@
upvar_index: usize,
) -> (Symbol, Span) {
let upvar_hir_id = mir.upvar_decls[upvar_index].var_hir_id.assert_crate_local();
- let upvar_node_id = tcx.hir().hir_to_node_id(upvar_hir_id);
- debug!("get_upvar_name_and_span_for_region: upvar_node_id={:?}", upvar_node_id);
+ debug!("get_upvar_name_and_span_for_region: upvar_hir_id={:?}", upvar_hir_id);
- let upvar_name = tcx.hir().name(upvar_node_id);
- let upvar_span = tcx.hir().span(upvar_node_id);
+ let upvar_name = tcx.hir().name_by_hir_id(upvar_hir_id);
+ let upvar_span = tcx.hir().span_by_hir_id(upvar_hir_id);
debug!("get_upvar_name_and_span_for_region: upvar_name={:?} upvar_span={:?}",
upvar_name, upvar_span);
diff --git a/src/librustc_mir/borrow_check/nll/universal_regions.rs b/src/librustc_mir/borrow_check/nll/universal_regions.rs
index 0a214e6..ad4444e 100644
--- a/src/librustc_mir/borrow_check/nll/universal_regions.rs
+++ b/src/librustc_mir/borrow_check/nll/universal_regions.rs
@@ -771,9 +771,8 @@
owner: fn_def_id.index,
local_id: *late_bound,
};
- let region_node_id = tcx.hir().hir_to_node_id(hir_id);
- let name = tcx.hir().name(region_node_id).as_interned_str();
- let region_def_id = tcx.hir().local_def_id(region_node_id);
+ let name = tcx.hir().name_by_hir_id(hir_id).as_interned_str();
+ let region_def_id = tcx.hir().local_def_id_from_hir_id(hir_id);
let liberated_region = tcx.mk_region(ty::ReFree(ty::FreeRegion {
scope: fn_def_id,
bound_region: ty::BoundRegion::BrNamed(region_def_id, name),
diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs
index a52b032..ed35fb2 100644
--- a/src/librustc_mir/build/mod.rs
+++ b/src/librustc_mir/build/mod.rs
@@ -64,8 +64,8 @@
) => {
(*body_id, ty.span)
}
- Node::AnonConst(hir::AnonConst { body, id, .. }) => {
- (*body, tcx.hir().span(*id))
+ Node::AnonConst(hir::AnonConst { body, hir_id, .. }) => {
+ (*body, tcx.hir().span_by_hir_id(*hir_id))
}
_ => span_bug!(tcx.hir().span(id), "can't build MIR for {:?}", def_id),
@@ -114,7 +114,7 @@
let self_arg;
if let Some(ref fn_decl) = tcx.hir().fn_decl(owner_id) {
let ty_hir_id = fn_decl.inputs[index].hir_id;
- let ty_span = tcx.hir().span(tcx.hir().hir_to_node_id(ty_hir_id));
+ let ty_span = tcx.hir().span_by_hir_id(ty_hir_id);
opt_ty_info = Some(ty_span);
self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() {
match fn_decl.implicit_self {
diff --git a/src/librustc_mir/hair/cx/block.rs b/src/librustc_mir/hair/cx/block.rs
index c24cf956..ed9f95f 100644
--- a/src/librustc_mir/hair/cx/block.rs
+++ b/src/librustc_mir/hair/cx/block.rs
@@ -48,7 +48,7 @@
for (index, stmt) in stmts.iter().enumerate() {
let hir_id = stmt.hir_id;
let opt_dxn_ext = cx.region_scope_tree.opt_destruction_scope(hir_id.local_id);
- let stmt_span = StatementSpan(cx.tcx.hir().span(stmt.id));
+ let stmt_span = StatementSpan(cx.tcx.hir().span_by_hir_id(hir_id));
match stmt.node {
hir::StmtKind::Expr(ref expr) |
hir::StmtKind::Semi(ref expr) => {
diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs
index 7f3c24d..a76aa74 100644
--- a/src/librustc_mir/monomorphize/collector.rs
+++ b/src/librustc_mir/monomorphize/collector.rs
@@ -450,8 +450,8 @@
if recursion_depth > *tcx.sess.recursion_limit.get() {
let error = format!("reached the recursion limit while instantiating `{}`",
instance);
- if let Some(node_id) = tcx.hir().as_local_node_id(def_id) {
- tcx.sess.span_fatal(tcx.hir().span(node_id), &error);
+ if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) {
+ tcx.sess.span_fatal(tcx.hir().span_by_hir_id(hir_id), &error);
} else {
tcx.sess.fatal(&error);
}
@@ -482,8 +482,8 @@
let instance_name = instance.to_string();
let msg = format!("reached the type-length limit while instantiating `{:.64}...`",
instance_name);
- let mut diag = if let Some(node_id) = tcx.hir().as_local_node_id(instance.def_id()) {
- tcx.sess.struct_span_fatal(tcx.hir().span(node_id), &msg)
+ let mut diag = if let Some(hir_id) = tcx.hir().as_local_hir_id(instance.def_id()) {
+ tcx.sess.struct_span_fatal(tcx.hir().span_by_hir_id(hir_id), &msg)
} else {
tcx.sess.struct_fatal(&msg)
};
diff --git a/src/librustc_msan/lib.rs b/src/librustc_msan/lib.rs
index 568bb54..3bdb86d 100644
--- a/src/librustc_msan/lib.rs
+++ b/src/librustc_msan/lib.rs
@@ -1,4 +1,5 @@
#![sanitizer_runtime]
+#![feature(nll)]
#![feature(sanitizer_runtime)]
#![feature(staged_api)]
#![no_std]
diff --git a/src/librustc_plugin/lib.rs b/src/librustc_plugin/lib.rs
index 32e003f..0ea1634 100644
--- a/src/librustc_plugin/lib.rs
+++ b/src/librustc_plugin/lib.rs
@@ -52,6 +52,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
#![recursion_limit="256"]
diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs
index 14a0922..d31dadd 100644
--- a/src/librustc_privacy/lib.rs
+++ b/src/librustc_privacy/lib.rs
@@ -2,6 +2,7 @@
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
#![recursion_limit="256"]
diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs
index 6399608..3b6179f 100644
--- a/src/librustc_resolve/check_unused.rs
+++ b/src/librustc_resolve/check_unused.rs
@@ -7,23 +7,52 @@
//
// Unused trait imports can't be checked until the method resolution. We save
// candidates here, and do the actual check in librustc_typeck/check_unused.rs.
+//
+// Checking for unused imports is split into three steps:
+//
+// - `UnusedImportCheckVisitor` walks the AST to find all the unused imports
+// inside of `UseTree`s, recording their `NodeId`s and grouping them by
+// the parent `use` item
+//
+// - `calc_unused_spans` then walks over all the `use` items marked in the
+// previous step to collect the spans associated with the `NodeId`s and to
+// calculate the spans that can be removed by rustfix; this is done in a
+// separate step to be able to collapse the adjacent spans that rustfix
+// will remove
+//
+// - `check_crate` finally emits the diagnostics based on the data generated
+// in the last step
use std::ops::{Deref, DerefMut};
use crate::Resolver;
use crate::resolve_imports::ImportDirectiveSubclass;
-use rustc::{lint, ty};
use rustc::util::nodemap::NodeMap;
+use rustc::{lint, ty};
+use rustc_data_structures::fx::FxHashSet;
use syntax::ast;
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, MultiSpan, DUMMY_SP};
+struct UnusedImport<'a> {
+ use_tree: &'a ast::UseTree,
+ use_tree_id: ast::NodeId,
+ item_span: Span,
+ unused: FxHashSet<ast::NodeId>,
+}
+
+impl<'a> UnusedImport<'a> {
+ fn add(&mut self, id: ast::NodeId) {
+ self.unused.insert(id);
+ }
+}
struct UnusedImportCheckVisitor<'a, 'b: 'a> {
resolver: &'a mut Resolver<'b>,
/// All the (so far) unused imports, grouped path list
- unused_imports: NodeMap<NodeMap<Span>>,
+ unused_imports: NodeMap<UnusedImport<'a>>,
+ base_use_tree: Option<&'a ast::UseTree>,
base_id: ast::NodeId,
item_span: Span,
}
@@ -46,7 +75,7 @@
impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> {
// We have information about whether `use` (import) directives are actually
// used now. If an import is not used at all, we signal a lint error.
- fn check_import(&mut self, item_id: ast::NodeId, id: ast::NodeId, span: Span) {
+ fn check_import(&mut self, id: ast::NodeId) {
let mut used = false;
self.per_ns(|this, ns| used |= this.used_imports.contains(&(id, ns)));
if !used {
@@ -54,16 +83,31 @@
// Check later.
return;
}
- self.unused_imports.entry(item_id).or_default().insert(id, span);
+ self.unused_import(self.base_id).add(id);
} else {
// This trait import is definitely used, in a way other than
// method resolution.
self.maybe_unused_trait_imports.remove(&id);
- if let Some(i) = self.unused_imports.get_mut(&item_id) {
- i.remove(&id);
+ if let Some(i) = self.unused_imports.get_mut(&self.base_id) {
+ i.unused.remove(&id);
}
}
}
+
+ fn unused_import(&mut self, id: ast::NodeId) -> &mut UnusedImport<'a> {
+ let use_tree_id = self.base_id;
+ let use_tree = self.base_use_tree.unwrap();
+ let item_span = self.item_span;
+
+ self.unused_imports
+ .entry(id)
+ .or_insert_with(|| UnusedImport {
+ use_tree,
+ use_tree_id,
+ item_span,
+ unused: FxHashSet::default(),
+ })
+ }
}
impl<'a, 'b> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b> {
@@ -88,31 +132,112 @@
// This allows the grouping of all the lints in the same item
if !nested {
self.base_id = id;
+ self.base_use_tree = Some(use_tree);
}
if let ast::UseTreeKind::Nested(ref items) = use_tree.kind {
- // If it's the parent group, cover the entire use item
- let span = if nested {
- use_tree.span
- } else {
- self.item_span
- };
-
if items.is_empty() {
- self.unused_imports
- .entry(self.base_id)
- .or_default()
- .insert(id, span);
+ self.unused_import(self.base_id).add(id);
}
} else {
- let base_id = self.base_id;
- self.check_import(base_id, id, use_tree.span);
+ self.check_import(id);
}
visit::walk_use_tree(self, use_tree, id);
}
}
+enum UnusedSpanResult {
+ Used,
+ FlatUnused(Span, Span),
+ NestedFullUnused(Vec<Span>, Span),
+ NestedPartialUnused(Vec<Span>, Vec<Span>),
+}
+
+fn calc_unused_spans(
+ unused_import: &UnusedImport<'_>,
+ use_tree: &ast::UseTree,
+ use_tree_id: ast::NodeId,
+) -> UnusedSpanResult {
+    // The full span is the whole item's span if this current tree is not nested inside another
+    // tree. This tells rustfix to remove the whole item if all the imports are unused
+ let full_span = if unused_import.use_tree.span == use_tree.span {
+ unused_import.item_span
+ } else {
+ use_tree.span
+ };
+ match use_tree.kind {
+ ast::UseTreeKind::Simple(..) | ast::UseTreeKind::Glob => {
+ if unused_import.unused.contains(&use_tree_id) {
+ UnusedSpanResult::FlatUnused(use_tree.span, full_span)
+ } else {
+ UnusedSpanResult::Used
+ }
+ }
+ ast::UseTreeKind::Nested(ref nested) => {
+ if nested.len() == 0 {
+ return UnusedSpanResult::FlatUnused(use_tree.span, full_span);
+ }
+
+ let mut unused_spans = Vec::new();
+ let mut to_remove = Vec::new();
+ let mut all_nested_unused = true;
+ let mut previous_unused = false;
+ for (pos, (use_tree, use_tree_id)) in nested.iter().enumerate() {
+ let remove = match calc_unused_spans(unused_import, use_tree, *use_tree_id) {
+ UnusedSpanResult::Used => {
+ all_nested_unused = false;
+ None
+ }
+ UnusedSpanResult::FlatUnused(span, remove) => {
+ unused_spans.push(span);
+ Some(remove)
+ }
+ UnusedSpanResult::NestedFullUnused(mut spans, remove) => {
+ unused_spans.append(&mut spans);
+ Some(remove)
+ }
+ UnusedSpanResult::NestedPartialUnused(mut spans, mut to_remove_extra) => {
+ all_nested_unused = false;
+ unused_spans.append(&mut spans);
+ to_remove.append(&mut to_remove_extra);
+ None
+ }
+ };
+ if let Some(remove) = remove {
+ let remove_span = if nested.len() == 1 {
+ remove
+ } else if pos == nested.len() - 1 || !all_nested_unused {
+ // Delete everything from the end of the last import, to delete the
+ // previous comma
+ nested[pos - 1].0.span.shrink_to_hi().to(use_tree.span)
+ } else {
+ // Delete everything until the next import, to delete the trailing commas
+ use_tree.span.to(nested[pos + 1].0.span.shrink_to_lo())
+ };
+
+ // Try to collapse adjacent spans into a single one. This prevents all cases of
+ // overlapping removals, which are not supported by rustfix
+ if previous_unused && !to_remove.is_empty() {
+ let previous = to_remove.pop().unwrap();
+ to_remove.push(previous.to(remove_span));
+ } else {
+ to_remove.push(remove_span);
+ }
+ }
+ previous_unused = remove.is_some();
+ }
+ if unused_spans.is_empty() {
+ UnusedSpanResult::Used
+ } else if all_nested_unused {
+ UnusedSpanResult::NestedFullUnused(unused_spans, full_span)
+ } else {
+ UnusedSpanResult::NestedPartialUnused(unused_spans, to_remove)
+ }
+ }
+ }
+}
+
pub fn check_crate(resolver: &mut Resolver<'_>, krate: &ast::Crate) {
for directive in resolver.potentially_unused_imports.iter() {
match directive.subclass {
@@ -152,14 +277,33 @@
let mut visitor = UnusedImportCheckVisitor {
resolver,
unused_imports: Default::default(),
+ base_use_tree: None,
base_id: ast::DUMMY_NODE_ID,
item_span: DUMMY_SP,
};
visit::walk_crate(&mut visitor, krate);
- for (id, spans) in &visitor.unused_imports {
+ for unused in visitor.unused_imports.values() {
+ let mut fixes = Vec::new();
+ let mut spans = match calc_unused_spans(unused, unused.use_tree, unused.use_tree_id) {
+ UnusedSpanResult::Used => continue,
+ UnusedSpanResult::FlatUnused(span, remove) => {
+ fixes.push((remove, String::new()));
+ vec![span]
+ }
+ UnusedSpanResult::NestedFullUnused(spans, remove) => {
+ fixes.push((remove, String::new()));
+ spans
+ }
+ UnusedSpanResult::NestedPartialUnused(spans, remove) => {
+ for fix in &remove {
+ fixes.push((*fix, String::new()));
+ }
+ spans
+ }
+ };
+
let len = spans.len();
- let mut spans = spans.values().cloned().collect::<Vec<Span>>();
spans.sort();
let ms = MultiSpan::from_spans(spans.clone());
let mut span_snippets = spans.iter()
@@ -177,6 +321,21 @@
} else {
String::new()
});
- visitor.session.buffer_lint(lint::builtin::UNUSED_IMPORTS, *id, ms, &msg);
+
+ let fix_msg = if fixes.len() == 1 && fixes[0].0 == unused.item_span {
+ "remove the whole `use` item"
+ } else if spans.len() > 1 {
+ "remove the unused imports"
+ } else {
+ "remove the unused import"
+ };
+
+ visitor.session.buffer_lint_with_diagnostic(
+ lint::builtin::UNUSED_IMPORTS,
+ unused.use_tree_id,
+ ms,
+ &msg,
+ lint::builtin::BuiltinLintDiagnostics::UnusedImports(fix_msg.into(), fixes),
+ );
}
}
diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs
index ecbfcec..ad73b30 100644
--- a/src/librustc_resolve/lib.rs
+++ b/src/librustc_resolve/lib.rs
@@ -2,6 +2,7 @@
#![feature(crate_visibility_modifier)]
#![feature(label_break_value)]
+#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_sort_by_cached_key)]
diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs
index c4a2ebe..1f7b6d7 100644
--- a/src/librustc_save_analysis/lib.rs
+++ b/src/librustc_save_analysis/lib.rs
@@ -1,5 +1,6 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(custom_attribute)]
+#![feature(nll)]
#![deny(rust_2018_idioms)]
#![allow(unused_attributes)]
diff --git a/src/librustc_tsan/lib.rs b/src/librustc_tsan/lib.rs
index 568bb54..3bdb86d 100644
--- a/src/librustc_tsan/lib.rs
+++ b/src/librustc_tsan/lib.rs
@@ -1,4 +1,5 @@
#![sanitizer_runtime]
+#![feature(nll)]
#![feature(sanitizer_runtime)]
#![feature(staged_api)]
#![no_std]
diff --git a/src/librustc_typeck/Cargo.toml b/src/librustc_typeck/Cargo.toml
index 68b28a6..dcfcd74 100644
--- a/src/librustc_typeck/Cargo.toml
+++ b/src/librustc_typeck/Cargo.toml
@@ -2,6 +2,7 @@
authors = ["The Rust Project Developers"]
name = "rustc_typeck"
version = "0.0.0"
+edition = "2018"
[lib]
name = "rustc_typeck"
@@ -14,7 +15,7 @@
log = "0.4"
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
-rustc_errors = { path = "../librustc_errors" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_target = { path = "../librustc_target" }
smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
syntax = { path = "../libsyntax" }
diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs
index 757385a..ee3fd6e 100644
--- a/src/librustc_typeck/astconv.rs
+++ b/src/librustc_typeck/astconv.rs
@@ -3,13 +3,13 @@
//! instance of `AstConv`.
use errors::{Applicability, DiagnosticId};
-use hir::{self, GenericArg, GenericArgs};
-use hir::def::Def;
-use hir::def_id::DefId;
-use hir::HirVec;
-use lint;
-use middle::resolve_lifetime as rl;
-use namespace::Namespace;
+use crate::hir::{self, GenericArg, GenericArgs};
+use crate::hir::def::Def;
+use crate::hir::def_id::DefId;
+use crate::hir::HirVec;
+use crate::lint;
+use crate::middle::resolve_lifetime as rl;
+use crate::namespace::Namespace;
use rustc::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS;
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt, ToPredicate, TypeFoldable};
@@ -18,15 +18,15 @@
use rustc::ty::wf::object_region_bounds;
use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi;
-use require_c_abi_if_variadic;
+use crate::require_c_abi_if_variadic;
use smallvec::SmallVec;
use syntax::ast;
use syntax::feature_gate::{GateIssue, emit_feature_err};
use syntax::ptr::P;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{DUMMY_SP, Span, MultiSpan};
-use util::common::ErrorReported;
-use util::nodemap::FxHashMap;
+use crate::util::common::ErrorReported;
+use crate::util::nodemap::FxHashMap;
use std::collections::BTreeSet;
use std::iter;
@@ -111,7 +111,7 @@
{
let tcx = self.tcx();
let lifetime_name = |def_id| {
- tcx.hir().name(tcx.hir().as_local_node_id(def_id).unwrap()).as_interned_str()
+ tcx.hir().name_by_hir_id(tcx.hir().as_local_hir_id(def_id).unwrap()).as_interned_str()
};
let r = match tcx.named_region(lifetime.hir_id) {
@@ -1682,12 +1682,13 @@
assert_eq!(opt_self_ty, None);
self.prohibit_generics(&path.segments);
- let node_id = tcx.hir().as_local_node_id(did).unwrap();
- let item_id = tcx.hir().get_parent_node(node_id);
- let item_def_id = tcx.hir().local_def_id(item_id);
+ let hir_id = tcx.hir().as_local_hir_id(did).unwrap();
+ let item_id = tcx.hir().get_parent_node_by_hir_id(hir_id);
+ let item_def_id = tcx.hir().local_def_id_from_hir_id(item_id);
let generics = tcx.generics_of(item_def_id);
- let index = generics.param_def_id_to_index[&tcx.hir().local_def_id(node_id)];
- tcx.mk_ty_param(index, tcx.hir().name(node_id).as_interned_str())
+ let index = generics.param_def_id_to_index[
+ &tcx.hir().local_def_id_from_hir_id(hir_id)];
+ tcx.mk_ty_param(index, tcx.hir().name_by_hir_id(hir_id).as_interned_str())
}
Def::SelfTy(_, Some(def_id)) => {
// `Self` in impl (we know the concrete type).
diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs
index a90d83f..3a670c8 100644
--- a/src/librustc_typeck/check/_match.rs
+++ b/src/librustc_typeck/check/_match.rs
@@ -1,5 +1,6 @@
-use check::{FnCtxt, Expectation, Diverges, Needs};
-use check::coercion::CoerceMany;
+use crate::check::{FnCtxt, Expectation, Diverges, Needs};
+use crate::check::coercion::CoerceMany;
+use crate::util::nodemap::FxHashMap;
use errors::Applicability;
use rustc::hir::{self, PatKind};
use rustc::hir::def::{Def, CtorKind};
@@ -13,7 +14,6 @@
use syntax::ptr::P;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::Span;
-use util::nodemap::FxHashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::cmp;
diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs
index 85cae17..be6d432 100644
--- a/src/librustc_typeck/check/cast.rs
+++ b/src/librustc_typeck/check/cast.rs
@@ -31,8 +31,8 @@
use super::FnCtxt;
use errors::{DiagnosticBuilder,Applicability};
-use hir::def_id::DefId;
-use lint;
+use crate::hir::def_id::DefId;
+use crate::lint;
use rustc::hir;
use rustc::session::Session;
use rustc::traits;
@@ -43,7 +43,7 @@
use rustc::middle::lang_items;
use syntax::ast;
use syntax_pos::Span;
-use util::common::ErrorReported;
+use crate::util::common::ErrorReported;
/// Reifies a cast check to be checked once we have full type information for
/// a function context.
@@ -294,7 +294,7 @@
.emit();
}
CastError::SizedUnsizedCast => {
- use structured_errors::{SizedUnsizedCastError, StructuredDiagnostic};
+ use crate::structured_errors::{SizedUnsizedCastError, StructuredDiagnostic};
SizedUnsizedCastError::new(&fcx.tcx.sess,
self.span,
self.expr_ty,
diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs
index df83c92..24c3009 100644
--- a/src/librustc_typeck/check/closure.rs
+++ b/src/librustc_typeck/check/closure.rs
@@ -2,8 +2,8 @@
use super::{check_fn, Expectation, FnCtxt, GeneratorTypes};
-use astconv::AstConv;
-use middle::region;
+use crate::astconv::AstConv;
+use crate::middle::region;
use rustc::hir::def_id::DefId;
use rustc::infer::{InferOk, InferResult};
use rustc::infer::LateBoundRegionConversionTime;
diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs
index d1dfe94..8a91e42 100644
--- a/src/librustc_typeck/check/coercion.rs
+++ b/src/librustc_typeck/check/coercion.rs
@@ -50,7 +50,7 @@
//! sort of a minor point so I've opted to leave it for later---after all
//! we may want to adjust precisely when coercions occur.
-use check::{FnCtxt, Needs};
+use crate::check::{FnCtxt, Needs};
use errors::DiagnosticBuilder;
use rustc::hir;
use rustc::hir::def_id::DefId;
diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs
index 0eb8d7d..0cc5071 100644
--- a/src/librustc_typeck/check/compare_method.rs
+++ b/src/librustc_typeck/check/compare_method.rs
@@ -736,8 +736,8 @@
in impl_m_type_params.zip(trait_m_type_params)
{
if impl_synthetic != trait_synthetic {
- let impl_node_id = tcx.hir().as_local_node_id(impl_def_id).unwrap();
- let impl_span = tcx.hir().span(impl_node_id);
+ let impl_hir_id = tcx.hir().as_local_hir_id(impl_def_id).unwrap();
+ let impl_span = tcx.hir().span_by_hir_id(impl_hir_id);
let trait_span = tcx.def_span(trait_def_id);
let mut err = struct_span_err!(tcx.sess,
impl_span,
@@ -840,7 +840,7 @@
match param.kind {
GenericParamKind::Lifetime { .. } => None,
GenericParamKind::Type { .. } => {
- if param.id == impl_node_id {
+ if param.hir_id == impl_hir_id {
Some(¶m.bounds)
} else {
None
diff --git a/src/librustc_typeck/check/demand.rs b/src/librustc_typeck/check/demand.rs
index 0d4690c..82f0037 100644
--- a/src/librustc_typeck/check/demand.rs
+++ b/src/librustc_typeck/check/demand.rs
@@ -1,4 +1,4 @@
-use check::FnCtxt;
+use crate::check::FnCtxt;
use rustc::infer::InferOk;
use rustc::traits::{ObligationCause, ObligationCauseCode};
diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs
index 60b5db0..0fc8241 100644
--- a/src/librustc_typeck/check/dropck.rs
+++ b/src/librustc_typeck/check/dropck.rs
@@ -1,13 +1,13 @@
-use check::regionck::RegionCtxt;
+use crate::check::regionck::RegionCtxt;
-use hir::def_id::DefId;
+use crate::hir::def_id::DefId;
use rustc::infer::outlives::env::OutlivesEnvironment;
use rustc::infer::{self, InferOk, SuppressRegionErrors};
use rustc::middle::region;
use rustc::traits::{ObligationCause, TraitEngine, TraitEngineExt};
use rustc::ty::subst::{Subst, Substs, UnpackedKind};
use rustc::ty::{self, Ty, TyCtxt};
-use util::common::ErrorReported;
+use crate::util::common::ErrorReported;
use syntax::ast;
use syntax_pos::Span;
@@ -184,7 +184,7 @@
// absent. So we report an error that the Drop impl injected a
// predicate that is not present on the struct definition.
- let self_type_node_id = tcx.hir().as_local_node_id(self_type_did).unwrap();
+ let self_type_hir_id = tcx.hir().as_local_hir_id(self_type_did).unwrap();
let drop_impl_span = tcx.def_span(drop_impl_did);
@@ -216,7 +216,7 @@
// repeated `contains` calls.
if !assumptions_in_impl_context.contains(&predicate) {
- let item_span = tcx.hir().span(self_type_node_id);
+ let item_span = tcx.hir().span_by_hir_id(self_type_hir_id);
struct_span_err!(
tcx.sess,
drop_impl_span,
diff --git a/src/librustc_typeck/check/generator_interior.rs b/src/librustc_typeck/check/generator_interior.rs
index 225fa1d..7f4b0a9 100644
--- a/src/librustc_typeck/check/generator_interior.rs
+++ b/src/librustc_typeck/check/generator_interior.rs
@@ -11,7 +11,7 @@
use rustc_data_structures::sync::Lrc;
use syntax_pos::Span;
use super::FnCtxt;
-use util::nodemap::FxHashMap;
+use crate::util::nodemap::FxHashMap;
struct InteriorVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs
index 82d4300..912ea39 100644
--- a/src/librustc_typeck/check/intrinsic.rs
+++ b/src/librustc_typeck/check/intrinsic.rs
@@ -4,7 +4,7 @@
use rustc::traits::{ObligationCause, ObligationCauseCode};
use rustc::ty::{self, TyCtxt, Ty};
use rustc::ty::subst::Subst;
-use require_same_types;
+use crate::require_same_types;
use rustc_target::spec::abi::Abi;
use syntax::symbol::Symbol;
diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs
index 2cf2974..34b248a 100644
--- a/src/librustc_typeck/check/method/confirm.rs
+++ b/src/librustc_typeck/check/method/confirm.rs
@@ -1,9 +1,9 @@
use super::{probe, MethodCallee};
-use astconv::AstConv;
-use check::{FnCtxt, PlaceOp, callee, Needs};
-use hir::GenericArg;
-use hir::def_id::DefId;
+use crate::astconv::AstConv;
+use crate::check::{FnCtxt, PlaceOp, callee, Needs};
+use crate::hir::GenericArg;
+use crate::hir::def_id::DefId;
use rustc::ty::subst::Substs;
use rustc::traits;
use rustc::ty::{self, Ty, GenericParamDefKind};
diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs
index b7d0157..02cd5b7 100644
--- a/src/librustc_typeck/check/method/mod.rs
+++ b/src/librustc_typeck/check/method/mod.rs
@@ -10,9 +10,9 @@
pub use self::CandidateSource::*;
pub use self::suggest::{SelfSource, TraitInfo};
-use check::FnCtxt;
+use crate::check::FnCtxt;
+use crate::namespace::Namespace;
use errors::{Applicability, DiagnosticBuilder};
-use namespace::Namespace;
use rustc_data_structures::sync::Lrc;
use rustc::hir;
use rustc::hir::def::Def;
@@ -29,7 +29,7 @@
use crate::{check_type_alias_enum_variants_enabled};
use self::probe::{IsSuggestion, ProbeScope};
-pub fn provide(providers: &mut ty::query::Providers) {
+pub fn provide(providers: &mut ty::query::Providers<'_>) {
suggest::provide(providers);
probe::provide(providers);
}
diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs
index ada4a95..cf31a54 100644
--- a/src/librustc_typeck/check/method/probe.rs
+++ b/src/librustc_typeck/check/method/probe.rs
@@ -3,11 +3,11 @@
use super::{CandidateSource, ImplSource, TraitSource};
use super::suggest;
-use check::autoderef::{self, Autoderef};
-use check::FnCtxt;
-use hir::def_id::DefId;
-use hir::def::Def;
-use namespace::Namespace;
+use crate::check::autoderef::{self, Autoderef};
+use crate::check::FnCtxt;
+use crate::hir::def_id::DefId;
+use crate::hir::def::Def;
+use crate::namespace::Namespace;
use rustc_data_structures::sync::Lrc;
use rustc::hir;
diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs
index 55b6e8f..8f98b34 100644
--- a/src/librustc_typeck/check/method/suggest.rs
+++ b/src/librustc_typeck/check/method/suggest.rs
@@ -1,10 +1,11 @@
//! Give useful errors and suggestions to users when an item can't be
//! found or is otherwise invalid.
-use check::FnCtxt;
+use crate::check::FnCtxt;
+use crate::middle::lang_items::FnOnceTraitLangItem;
+use crate::namespace::Namespace;
+use crate::util::nodemap::FxHashSet;
use errors::{Applicability, DiagnosticBuilder};
-use middle::lang_items::FnOnceTraitLangItem;
-use namespace::Namespace;
use rustc_data_structures::sync::Lrc;
use rustc::hir::{self, ExprKind, Node, QPath};
use rustc::hir::def::Def;
@@ -15,7 +16,6 @@
use rustc::traits::Obligation;
use rustc::ty::{self, Adt, Ty, TyCtxt, ToPolyTraitRef, ToPredicate, TypeFoldable};
use rustc::ty::item_path::with_crate_prefix;
-use util::nodemap::FxHashSet;
use syntax_pos::{Span, FileName};
use syntax::ast;
use syntax::util::lev_distance::find_best_match_for_name;
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs
index fb8f608..467032f 100644
--- a/src/librustc_typeck/check/mod.rs
+++ b/src/librustc_typeck/check/mod.rs
@@ -83,15 +83,15 @@
pub mod intrinsic;
mod op;
-use astconv::{AstConv, PathSeg};
+use crate::astconv::{AstConv, PathSeg};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
use rustc::hir::{self, ExprKind, GenericArg, ItemKind, Node, PatKind, QPath};
use rustc::hir::def::{CtorKind, Def};
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use middle::lang_items;
-use namespace::Namespace;
+use crate::middle::lang_items;
+use crate::namespace::Namespace;
use rustc::infer::{self, InferCtxt, InferOk, InferResult, RegionVariableOrigin};
use rustc::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse};
use rustc_data_structures::indexed_vec::Idx;
@@ -130,14 +130,14 @@
use std::ops::{self, Deref};
use std::slice;
-use require_c_abi_if_variadic;
-use session::{CompileIncomplete, Session};
-use session::config::EntryFnType;
-use TypeAndSubsts;
-use lint;
-use util::captures::Captures;
-use util::common::{ErrorReported, indenter};
-use util::nodemap::{DefIdMap, DefIdSet, FxHashMap, FxHashSet, NodeMap};
+use crate::require_c_abi_if_variadic;
+use crate::session::{CompileIncomplete, Session};
+use crate::session::config::EntryFnType;
+use crate::TypeAndSubsts;
+use crate::lint;
+use crate::util::captures::Captures;
+use crate::util::common::{ErrorReported, indenter};
+use crate::util::nodemap::{DefIdMap, DefIdSet, FxHashMap, FxHashSet, NodeMap};
pub use self::Expectation::*;
use self::autoderef::Autoderef;
@@ -1883,14 +1883,14 @@
// Check for duplicate discriminant values
if let Some(i) = disr_vals.iter().position(|&x| x.val == discr.val) {
let variant_did = def.variants[VariantIdx::new(i)].did;
- let variant_i_node_id = tcx.hir().as_local_node_id(variant_did).unwrap();
- let variant_i = tcx.hir().expect_variant(variant_i_node_id);
+ let variant_i_hir_id = tcx.hir().as_local_hir_id(variant_did).unwrap();
+ let variant_i = tcx.hir().expect_variant(variant_i_hir_id);
let i_span = match variant_i.node.disr_expr {
- Some(ref expr) => tcx.hir().span(expr.id),
- None => tcx.hir().span(variant_i_node_id)
+ Some(ref expr) => tcx.hir().span_by_hir_id(expr.hir_id),
+ None => tcx.hir().span_by_hir_id(variant_i_hir_id)
};
let span = match v.node.disr_expr {
- Some(ref expr) => tcx.hir().span(expr.id),
+ Some(ref expr) => tcx.hir().span_by_hir_id(expr.hir_id),
None => v.span
};
struct_span_err!(tcx.sess, span, E0081,
@@ -3044,7 +3044,7 @@
// arguments which we skipped above.
if variadic {
fn variadic_error<'tcx>(s: &Session, span: Span, t: Ty<'tcx>, cast_ty: &str) {
- use structured_errors::{VariadicError, StructuredDiagnostic};
+ use crate::structured_errors::{VariadicError, StructuredDiagnostic};
VariadicError::new(s, span, t, cast_ty).diagnostic().emit();
}
@@ -3685,8 +3685,8 @@
display
}
- fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS)
- -> DiagnosticBuilder {
+ fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS<'_>)
+ -> DiagnosticBuilder<'_> {
type_error_struct!(self.tcx().sess, span, expr_t, E0609,
"no field `{}` on type `{}`",
field, expr_t)
@@ -5257,7 +5257,7 @@
&self,
blk: &'gcx hir::Block,
expected_ty: Ty<'tcx>,
- err: &mut DiagnosticBuilder,
+ err: &mut DiagnosticBuilder<'_>,
) {
if let Some(span_semi) = self.could_remove_semicolon(blk, expected_ty) {
err.span_suggestion(
@@ -5703,8 +5703,8 @@
});
for (&used, param) in types_used.iter().zip(types) {
if !used {
- let id = tcx.hir().as_local_node_id(param.def_id).unwrap();
- let span = tcx.hir().span(id);
+ let id = tcx.hir().as_local_hir_id(param.def_id).unwrap();
+ let span = tcx.hir().span_by_hir_id(id);
struct_span_err!(tcx.sess, span, E0091, "type parameter `{}` is unused", param.name)
.span_label(span, "unused type parameter")
.emit();
@@ -5725,7 +5725,7 @@
);
handler.note_without_error(&format!("rustc {} running on {}",
option_env!("CFG_VERSION").unwrap_or("unknown_version"),
- ::session::config::host_triple(),
+ crate::session::config::host_triple(),
));
}
diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs
index b90c18e..c058977 100644
--- a/src/librustc_typeck/check/regionck.rs
+++ b/src/librustc_typeck/check/regionck.rs
@@ -72,11 +72,11 @@
//! relation, except that a borrowed pointer never owns its
//! contents.
-use check::dropck;
-use check::FnCtxt;
-use middle::mem_categorization as mc;
-use middle::mem_categorization::Categorization;
-use middle::region;
+use crate::check::dropck;
+use crate::check::FnCtxt;
+use crate::middle::mem_categorization as mc;
+use crate::middle::mem_categorization::Categorization;
+use crate::middle::region;
use rustc::hir::def_id::DefId;
use rustc::infer::outlives::env::OutlivesEnvironment;
use rustc::infer::{self, RegionObligation, SuppressRegionErrors};
diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs
index ffd7c21..1816b74 100644
--- a/src/librustc_typeck/check/upvar.rs
+++ b/src/librustc_typeck/check/upvar.rs
@@ -32,9 +32,9 @@
use super::FnCtxt;
-use middle::expr_use_visitor as euv;
-use middle::mem_categorization as mc;
-use middle::mem_categorization::Categorization;
+use crate::middle::expr_use_visitor as euv;
+use crate::middle::mem_categorization as mc;
+use crate::middle::mem_categorization::Categorization;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::hir::def_id::LocalDefId;
@@ -650,6 +650,5 @@
}
fn var_name(tcx: TyCtxt, var_hir_id: hir::HirId) -> ast::Name {
- let var_node_id = tcx.hir().hir_to_node_id(var_hir_id);
- tcx.hir().name(var_node_id)
+ tcx.hir().name_by_hir_id(var_hir_id)
}
diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs
index 9788170..b51fd58 100644
--- a/src/librustc_typeck/check/wfcheck.rs
+++ b/src/librustc_typeck/check/wfcheck.rs
@@ -1,7 +1,7 @@
-use check::{Inherited, FnCtxt};
-use constrained_type_params::{identify_constrained_type_params, Parameter};
+use crate::check::{Inherited, FnCtxt};
+use crate::constrained_type_params::{identify_constrained_type_params, Parameter};
-use hir::def_id::DefId;
+use crate::hir::def_id::DefId;
use rustc::traits::{self, ObligationCauseCode};
use rustc::ty::{self, Lift, Ty, TyCtxt, TyKind, GenericParamDefKind, TypeFoldable, ToPredicate};
use rustc::ty::subst::{Subst, Substs};
@@ -62,11 +62,11 @@
/// not included it frequently leads to confusing errors in fn bodies. So it's better to check
/// the types first.
pub fn check_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- let item = tcx.hir().expect_item(node_id);
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ let item = tcx.hir().expect_item_by_hir_id(hir_id);
- debug!("check_item_well_formed(it.id={}, it.name={})",
- item.id,
+ debug!("check_item_well_formed(it.hir_id={:?}, it.name={})",
+ item.hir_id,
tcx.item_path_str(def_id));
match item.node {
@@ -88,7 +88,7 @@
// won't be allowed unless there's an *explicit* implementation of `Send`
// for `T`
hir::ItemKind::Impl(_, polarity, defaultness, _, ref trait_ref, ref self_ty, _) => {
- let is_auto = tcx.impl_trait_ref(tcx.hir().local_def_id(item.id))
+ let is_auto = tcx.impl_trait_ref(tcx.hir().local_def_id_from_hir_id(item.hir_id))
.map_or(false, |trait_ref| tcx.trait_is_auto(trait_ref.def_id));
if let (hir::Defaultness::Default { .. }, true) = (defaultness, is_auto) {
tcx.sess.span_err(item.span, "impls of auto traits cannot be default");
diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs
index 238b087..e02e706 100644
--- a/src/librustc_typeck/check/writeback.rs
+++ b/src/librustc_typeck/check/writeback.rs
@@ -2,7 +2,7 @@
// unresolved type variables and replaces "ty_var" types with their
// substitutions.
-use check::FnCtxt;
+use crate::check::FnCtxt;
use errors::DiagnosticBuilder;
use rustc::hir;
use rustc::hir::def_id::{DefId, DefIndex};
@@ -407,8 +407,7 @@
if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
if self.rustc_dump_user_substs {
// This is a unit-testing mechanism.
- let node_id = self.tcx().hir().hir_to_node_id(hir_id);
- let span = self.tcx().hir().span(node_id);
+ let span = self.tcx().hir().span_by_hir_id(hir_id);
// We need to buffer the errors in order to guarantee a consistent
// order when emitting them.
let err = self.tcx().sess.struct_span_err(
@@ -739,15 +738,14 @@
impl Locatable for DefIndex {
fn to_span(&self, tcx: &TyCtxt) -> Span {
- let node_id = tcx.hir().def_index_to_node_id(*self);
- tcx.hir().span(node_id)
+ let hir_id = tcx.hir().def_index_to_hir_id(*self);
+ tcx.hir().span_by_hir_id(hir_id)
}
}
impl Locatable for hir::HirId {
fn to_span(&self, tcx: &TyCtxt) -> Span {
- let node_id = tcx.hir().hir_to_node_id(*self);
- tcx.hir().span(node_id)
+ tcx.hir().span_by_hir_id(*self)
}
}
diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs
index a7e19fc..18194ee 100644
--- a/src/librustc_typeck/check_unused.rs
+++ b/src/librustc_typeck/check_unused.rs
@@ -1,4 +1,4 @@
-use lint;
+use crate::lint;
use rustc::ty::TyCtxt;
use errors::Applicability;
diff --git a/src/librustc_typeck/coherence/builtin.rs b/src/librustc_typeck/coherence/builtin.rs
index bd2373d..3ec08f2 100644
--- a/src/librustc_typeck/coherence/builtin.rs
+++ b/src/librustc_typeck/coherence/builtin.rs
@@ -76,7 +76,7 @@
fn visit_implementation_of_copy<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_did: DefId) {
debug!("visit_implementation_of_copy: impl_did={:?}", impl_did);
- let impl_node_id = if let Some(n) = tcx.hir().as_local_node_id(impl_did) {
+ let impl_hir_id = if let Some(n) = tcx.hir().as_local_hir_id(impl_did) {
n
} else {
debug!("visit_implementation_of_copy(): impl not in this crate");
@@ -87,7 +87,7 @@
debug!("visit_implementation_of_copy: self_type={:?} (bound)",
self_type);
- let span = tcx.hir().span(impl_node_id);
+ let span = tcx.hir().span_by_hir_id(impl_hir_id);
let param_env = tcx.param_env(impl_did);
assert!(!self_type.has_escaping_bound_vars());
@@ -97,7 +97,7 @@
match param_env.can_type_implement_copy(tcx, self_type) {
Ok(()) => {}
Err(CopyImplementationError::InfrigingFields(fields)) => {
- let item = tcx.hir().expect_item(impl_node_id);
+ let item = tcx.hir().expect_item_by_hir_id(impl_hir_id);
let span = if let ItemKind::Impl(.., Some(ref tr), _, _) = item.node {
tr.path.span
} else {
@@ -114,7 +114,7 @@
err.emit()
}
Err(CopyImplementationError::NotAnAdt) => {
- let item = tcx.hir().expect_item(impl_node_id);
+ let item = tcx.hir().expect_item_by_hir_id(impl_hir_id);
let span = if let ItemKind::Impl(.., ref ty, _) = item.node {
ty.span
} else {
diff --git a/src/librustc_typeck/coherence/inherent_impls_overlap.rs b/src/librustc_typeck/coherence/inherent_impls_overlap.rs
index 52dee29..138c598 100644
--- a/src/librustc_typeck/coherence/inherent_impls_overlap.rs
+++ b/src/librustc_typeck/coherence/inherent_impls_overlap.rs
@@ -1,11 +1,11 @@
-use namespace::Namespace;
+use crate::namespace::Namespace;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::traits::{self, IntercrateMode};
use rustc::ty::TyCtxt;
-use lint;
+use crate::lint;
pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
crate_num: CrateNum) {
diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs
index 853c4c8..4eee68b 100644
--- a/src/librustc_typeck/coherence/mod.rs
+++ b/src/librustc_typeck/coherence/mod.rs
@@ -5,7 +5,7 @@
// done by the orphan and overlap modules. Then we build up various
// mappings. That mapping code resides here.
-use hir::def_id::{DefId, LOCAL_CRATE};
+use crate::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::traits;
use rustc::ty::{self, TyCtxt, TypeFoldable};
use rustc::ty::query::Providers;
diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs
index 9dc74c5..bb4fba1 100644
--- a/src/librustc_typeck/collect.rs
+++ b/src/librustc_typeck/collect.rs
@@ -14,13 +14,13 @@
//! At present, however, we do run collection across all items in the
//! crate as a kind of pass. This should eventually be factored away.
-use astconv::{AstConv, Bounds};
-use constrained_type_params as ctp;
-use check::intrinsic::intrisic_operation_unsafety;
-use lint;
-use middle::lang_items::SizedTraitLangItem;
-use middle::resolve_lifetime as rl;
-use middle::weak_lang_items;
+use crate::astconv::{AstConv, Bounds};
+use crate::constrained_type_params as ctp;
+use crate::check::intrinsic::intrisic_operation_unsafety;
+use crate::lint;
+use crate::middle::lang_items::SizedTraitLangItem;
+use crate::middle::resolve_lifetime as rl;
+use crate::middle::weak_lang_items;
use rustc::mir::mono::Linkage;
use rustc::ty::query::Providers;
use rustc::ty::subst::Substs;
@@ -68,7 +68,7 @@
);
}
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
*providers = Providers {
type_of,
generics_of,
@@ -737,8 +737,8 @@
}
fn trait_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx ty::TraitDef {
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- let item = tcx.hir().expect_item(node_id);
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ let item = tcx.hir().expect_item_by_hir_id(hir_id);
let (is_auto, unsafety) = match item.node {
hir::ItemKind::Trait(is_auto, unsafety, ..) => (is_auto == hir::IsAuto::Yes, unsafety),
@@ -1509,8 +1509,8 @@
) -> Option<ty::TraitRef<'tcx>> {
let icx = ItemCtxt::new(tcx, def_id);
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- match tcx.hir().expect_item(node_id).node {
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ match tcx.hir().expect_item_by_hir_id(hir_id).node {
hir::ItemKind::Impl(.., ref opt_trait_ref, _, _) => {
opt_trait_ref.as_ref().map(|ast_trait_ref| {
let selfty = tcx.type_of(def_id);
@@ -1522,8 +1522,8 @@
}
fn impl_polarity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> hir::ImplPolarity {
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- match tcx.hir().expect_item(node_id).node {
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ match tcx.hir().expect_item_by_hir_id(hir_id).node {
hir::ItemKind::Impl(_, polarity, ..) => polarity,
ref item => bug!("impl_polarity: {:?} not an impl", item),
}
diff --git a/src/librustc_typeck/constrained_type_params.rs b/src/librustc_typeck/constrained_type_params.rs
index 199ea31..d1f33b6 100644
--- a/src/librustc_typeck/constrained_type_params.rs
+++ b/src/librustc_typeck/constrained_type_params.rs
@@ -124,7 +124,7 @@
/// which is determined by 1, which requires `U`, that is determined
/// by 0. I should probably pick a less tangled example, but I can't
/// think of any.
-pub fn setup_constraining_predicates<'tcx>(tcx: TyCtxt,
+pub fn setup_constraining_predicates<'tcx>(tcx: TyCtxt<'_, '_, '_>,
predicates: &mut [(ty::Predicate<'tcx>, Span)],
impl_trait_ref: Option<ty::TraitRef<'tcx>>,
input_parameters: &mut FxHashSet<Parameter>)
diff --git a/src/librustc_typeck/impl_wf_check.rs b/src/librustc_typeck/impl_wf_check.rs
index 07f5fca..6de06b6 100644
--- a/src/librustc_typeck/impl_wf_check.rs
+++ b/src/librustc_typeck/impl_wf_check.rs
@@ -8,7 +8,7 @@
//! specialization errors. These things can (and probably should) be
//! fixed, but for the moment it's easier to do these checks early.
-use constrained_type_params as ctp;
+use crate::constrained_type_params as ctp;
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::hir::def_id::DefId;
@@ -162,7 +162,7 @@
// used elsewhere are not projected back out.
}
-fn report_unused_parameter(tcx: TyCtxt,
+fn report_unused_parameter(tcx: TyCtxt<'_, '_, '_>,
span: Span,
kind: &str,
name: &str)
diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs
index 8d77310..e99ec53 100644
--- a/src/librustc_typeck/lib.rs
+++ b/src/librustc_typeck/lib.rs
@@ -72,17 +72,15 @@
#![recursion_limit="256"]
+#![deny(rust_2018_idioms)]
+#![allow(explicit_outlives_requirements)]
+
+#![allow(elided_lifetimes_in_paths)] // WIP
+
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
-extern crate syntax_pos;
-
-extern crate arena;
#[macro_use] extern crate rustc;
-extern crate rustc_data_structures;
-extern crate rustc_errors as errors;
-extern crate rustc_target;
-extern crate smallvec;
// N.B., this module needs to be declared first so diagnostics are
// registered before they are used.
@@ -141,7 +139,7 @@
}
}
-fn require_c_abi_if_variadic(tcx: TyCtxt,
+fn require_c_abi_if_variadic(tcx: TyCtxt<'_, '_, '_>,
decl: &hir::FnDecl,
abi: Abi,
span: Span) {
@@ -310,7 +308,7 @@
}
}
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
collect::provide(providers);
coherence::provide(providers);
check::provide(providers);
diff --git a/src/librustc_typeck/outlives/explicit.rs b/src/librustc_typeck/outlives/explicit.rs
index 38f4b37..574086f 100644
--- a/src/librustc_typeck/outlives/explicit.rs
+++ b/src/librustc_typeck/outlives/explicit.rs
@@ -1,6 +1,6 @@
use rustc::hir::def_id::DefId;
use rustc::ty::{self, OutlivesPredicate, TyCtxt};
-use util::nodemap::FxHashMap;
+use crate::util::nodemap::FxHashMap;
use super::utils::*;
diff --git a/src/librustc_typeck/outlives/implicit_infer.rs b/src/librustc_typeck/outlives/implicit_infer.rs
index e388a3e..0ff884d 100644
--- a/src/librustc_typeck/outlives/implicit_infer.rs
+++ b/src/librustc_typeck/outlives/implicit_infer.rs
@@ -1,5 +1,4 @@
-use rustc::hir;
-use hir::Node;
+use rustc::hir::{self, Node};
use rustc::hir::def_id::DefId;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ty::subst::{Kind, Subst, UnpackedKind};
diff --git a/src/librustc_typeck/outlives/mod.rs b/src/librustc_typeck/outlives/mod.rs
index f0310f2..b3634d3 100644
--- a/src/librustc_typeck/outlives/mod.rs
+++ b/src/librustc_typeck/outlives/mod.rs
@@ -12,7 +12,7 @@
pub mod test;
mod utils;
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
*providers = Providers {
inferred_outlives_of,
inferred_outlives_crate,
diff --git a/src/librustc_typeck/variance/mod.rs b/src/librustc_typeck/variance/mod.rs
index afb6a68..3474227 100644
--- a/src/librustc_typeck/variance/mod.rs
+++ b/src/librustc_typeck/variance/mod.rs
@@ -46,12 +46,12 @@
fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
-> Lrc<Vec<ty::Variance>> {
- let id = tcx.hir().as_local_node_id(item_def_id).expect("expected local def-id");
+ let id = tcx.hir().as_local_hir_id(item_def_id).expect("expected local def-id");
let unsupported = || {
// Variance not relevant.
- span_bug!(tcx.hir().span(id), "asked to compute variance for wrong kind of item")
+ span_bug!(tcx.hir().span_by_hir_id(id), "asked to compute variance for wrong kind of item")
};
- match tcx.hir().get(id) {
+ match tcx.hir().get_by_hir_id(id) {
Node::Item(item) => match item.node {
hir::ItemKind::Enum(..) |
hir::ItemKind::Struct(..) |
diff --git a/src/librustc_typeck/variance/terms.rs b/src/librustc_typeck/variance/terms.rs
index d53e2d2..ec0acfb 100644
--- a/src/librustc_typeck/variance/terms.rs
+++ b/src/librustc_typeck/variance/terms.rs
@@ -15,7 +15,7 @@
use syntax::ast;
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use util::nodemap::NodeMap;
+use crate::util::nodemap::NodeMap;
use self::VarianceTerm::*;
diff --git a/src/libserialize/Cargo.toml b/src/libserialize/Cargo.toml
index 3e04081..949af0e 100644
--- a/src/libserialize/Cargo.toml
+++ b/src/libserialize/Cargo.toml
@@ -2,6 +2,7 @@
authors = ["The Rust Project Developers"]
name = "serialize"
version = "0.0.0"
+edition = "2018"
[lib]
name = "serialize"
diff --git a/src/libserialize/collection_impls.rs b/src/libserialize/collection_impls.rs
index f3afc3b..c0a8fa9 100644
--- a/src/libserialize/collection_impls.rs
+++ b/src/libserialize/collection_impls.rs
@@ -2,7 +2,7 @@
use std::hash::{Hash, BuildHasher};
-use {Decodable, Encodable, Decoder, Encoder};
+use crate::{Decodable, Encodable, Decoder, Encoder};
use std::collections::{LinkedList, VecDeque, BTreeMap, BTreeSet, HashMap, HashSet};
use std::rc::Rc;
use std::sync::Arc;
diff --git a/src/libserialize/hex.rs b/src/libserialize/hex.rs
index 6127440..73b9122 100644
--- a/src/libserialize/hex.rs
+++ b/src/libserialize/hex.rs
@@ -60,7 +60,7 @@
}
impl fmt::Display for FromHexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
InvalidHexCharacter(ch, idx) =>
write!(f, "Invalid character '{}' at position {}", ch, idx),
@@ -145,8 +145,8 @@
#[cfg(test)]
mod tests {
extern crate test;
- use self::test::Bencher;
- use hex::{FromHex, ToHex};
+ use test::Bencher;
+ use crate::hex::{FromHex, ToHex};
#[test]
pub fn test_to_hex() {
diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs
index 362b457..5b3444b 100644
--- a/src/libserialize/json.rs
+++ b/src/libserialize/json.rs
@@ -199,9 +199,8 @@
use std::str::FromStr;
use std::string;
use std::{char, f64, fmt, str};
-use std;
-use Encodable;
+use crate::Encodable;
/// Represents a json value
#[derive(Clone, PartialEq, PartialOrd, Debug)]
@@ -221,8 +220,8 @@
pub struct PrettyJson<'a> { inner: &'a Json }
-pub struct AsJson<'a, T: 'a> { inner: &'a T }
-pub struct AsPrettyJson<'a, T: 'a> { inner: &'a T, indent: Option<usize> }
+pub struct AsJson<'a, T> { inner: &'a T }
+pub struct AsPrettyJson<'a, T> { inner: &'a T, indent: Option<usize> }
/// The errors that can arise while parsing a JSON stream.
#[derive(Clone, Copy, PartialEq, Debug)]
@@ -295,18 +294,18 @@
}
/// Shortcut function to decode a JSON `&str` into an object
-pub fn decode<T: ::Decodable>(s: &str) -> DecodeResult<T> {
+pub fn decode<T: crate::Decodable>(s: &str) -> DecodeResult<T> {
let json = match from_str(s) {
Ok(x) => x,
Err(e) => return Err(ParseError(e))
};
let mut decoder = Decoder::new(json);
- ::Decodable::decode(&mut decoder)
+ crate::Decodable::decode(&mut decoder)
}
/// Shortcut function to encode a `T` into a JSON `String`
-pub fn encode<T: ::Encodable>(object: &T) -> Result<string::String, EncoderError> {
+pub fn encode<T: crate::Encodable>(object: &T) -> Result<string::String, EncoderError> {
let mut s = String::new();
{
let mut encoder = Encoder::new(&mut s);
@@ -316,7 +315,7 @@
}
impl fmt::Display for ErrorCode {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
error_str(*self).fmt(f)
}
}
@@ -326,14 +325,14 @@
}
impl fmt::Display for ParserError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME this should be a nicer error
fmt::Debug::fmt(self, f)
}
}
impl fmt::Display for DecoderError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME this should be a nicer error
fmt::Debug::fmt(self, f)
}
@@ -344,7 +343,7 @@
}
impl fmt::Display for EncoderError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME this should be a nicer error
fmt::Debug::fmt(self, f)
}
@@ -477,7 +476,7 @@
})
}
-impl<'a> ::Encoder for Encoder<'a> {
+impl<'a> crate::Encoder for Encoder<'a> {
type Error = EncoderError;
fn emit_unit(&mut self) -> EncodeResult {
@@ -727,7 +726,7 @@
}
}
-impl<'a> ::Encoder for PrettyEncoder<'a> {
+impl<'a> crate::Encoder for PrettyEncoder<'a> {
type Error = EncoderError;
fn emit_unit(&mut self) -> EncodeResult {
@@ -997,7 +996,7 @@
}
impl Encodable for Json {
- fn encode<E: ::Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
+ fn encode<E: crate::Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
match *self {
Json::I64(v) => v.encode(e),
Json::U64(v) => v.encode(e),
@@ -1013,20 +1012,20 @@
/// Create an `AsJson` wrapper which can be used to print a value as JSON
/// on-the-fly via `write!`
-pub fn as_json<T>(t: &T) -> AsJson<T> {
+pub fn as_json<T>(t: &T) -> AsJson<'_, T> {
AsJson { inner: t }
}
/// Create an `AsPrettyJson` wrapper which can be used to print a value as JSON
/// on-the-fly via `write!`
-pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<T> {
+pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<'_, T> {
AsPrettyJson { inner: t, indent: None }
}
impl Json {
/// Borrow this json object as a pretty object to generate a pretty
/// representation for it via `Display`.
- pub fn pretty(&self) -> PrettyJson {
+ pub fn pretty(&self) -> PrettyJson<'_> {
PrettyJson { inner: self }
}
@@ -1300,7 +1299,7 @@
/// Provides access to the StackElement at a given index.
/// lower indices are at the bottom of the stack while higher indices are
/// at the top.
- pub fn get(&self, idx: usize) -> StackElement {
+ pub fn get(&self, idx: usize) -> StackElement<'_> {
match self.stack[idx] {
InternalIndex(i) => StackElement::Index(i),
InternalKey(start, size) => {
@@ -1311,8 +1310,8 @@
}
}
- /// Compares this stack with an array of StackElements.
- pub fn is_equal_to(&self, rhs: &[StackElement]) -> bool {
+ /// Compares this stack with an array of StackElements.
+ pub fn is_equal_to(&self, rhs: &[StackElement<'_>]) -> bool {
if self.stack.len() != rhs.len() { return false; }
for (i, r) in rhs.iter().enumerate() {
if self.get(i) != *r { return false; }
@@ -1322,7 +1321,7 @@
/// Returns true if the bottom-most elements of this stack are the same as
/// the ones passed as parameter.
- pub fn starts_with(&self, rhs: &[StackElement]) -> bool {
+ pub fn starts_with(&self, rhs: &[StackElement<'_>]) -> bool {
if self.stack.len() < rhs.len() { return false; }
for (i, r) in rhs.iter().enumerate() {
if self.get(i) != *r { return false; }
@@ -1332,7 +1331,7 @@
/// Returns true if the top-most elements of this stack are the same as
/// the ones passed as parameter.
- pub fn ends_with(&self, rhs: &[StackElement]) -> bool {
+ pub fn ends_with(&self, rhs: &[StackElement<'_>]) -> bool {
if self.stack.len() < rhs.len() { return false; }
let offset = self.stack.len() - rhs.len();
for (i, r) in rhs.iter().enumerate() {
@@ -1342,7 +1341,7 @@
}
/// Returns the top-most element (if any).
- pub fn top(&self) -> Option<StackElement> {
+ pub fn top(&self) -> Option<StackElement<'_>> {
match self.stack.last() {
None => None,
Some(&InternalIndex(i)) => Some(StackElement::Index(i)),
@@ -2115,7 +2114,7 @@
}
}
-impl ::Decoder for Decoder {
+impl crate::Decoder for Decoder {
type Error = DecoderError;
fn read_nil(&mut self) -> DecodeResult<()> {
@@ -2172,7 +2171,7 @@
Err(ExpectedError("single character string".to_owned(), s.to_string()))
}
- fn read_str(&mut self) -> DecodeResult<Cow<str>> {
+ fn read_str(&mut self) -> DecodeResult<Cow<'_, str>> {
expect!(self.pop(), String).map(Cow::Owned)
}
@@ -2518,7 +2517,7 @@
impl fmt::Display for Json {
/// Encodes a json value into a string
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut shim = FormatShim { inner: f };
let mut encoder = Encoder::new(&mut shim);
match self.encode(&mut encoder) {
@@ -2530,7 +2529,7 @@
impl<'a> fmt::Display for PrettyJson<'a> {
/// Encodes a json value into a string
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut shim = FormatShim { inner: f };
let mut encoder = PrettyEncoder::new(&mut shim);
match self.inner.encode(&mut encoder) {
@@ -2542,7 +2541,7 @@
impl<'a, T: Encodable> fmt::Display for AsJson<'a, T> {
/// Encodes a json value into a string
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut shim = FormatShim { inner: f };
let mut encoder = Encoder::new(&mut shim);
match self.inner.encode(&mut encoder) {
@@ -2562,7 +2561,7 @@
impl<'a, T: Encodable> fmt::Display for AsPrettyJson<'a, T> {
/// Encodes a json value into a string
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut shim = FormatShim { inner: f };
let mut encoder = PrettyEncoder::new(&mut shim);
if let Some(n) = self.indent {
@@ -2584,1220 +2583,13 @@
#[cfg(test)]
mod tests {
+ // Benchmarks and tests that require private items
+
extern crate test;
- use self::Animal::*;
- use self::test::Bencher;
- use {Encodable, Decodable};
- use super::Json::*;
- use super::ErrorCode::*;
- use super::ParserError::*;
- use super::DecoderError::*;
- use super::JsonEvent::*;
- use super::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser,
- StackElement, Stack, Decoder, Encoder, EncoderError};
- use std::{i64, u64, f32, f64};
- use std::io::prelude::*;
- use std::collections::BTreeMap;
+ use test::Bencher;
+ use super::{from_str, Parser, StackElement, Stack};
use std::string;
- #[derive(RustcDecodable, Eq, PartialEq, Debug)]
- struct OptionData {
- opt: Option<usize>,
- }
-
- #[test]
- fn test_decode_option_none() {
- let s ="{}";
- let obj: OptionData = super::decode(s).unwrap();
- assert_eq!(obj, OptionData { opt: None });
- }
-
- #[test]
- fn test_decode_option_some() {
- let s = "{ \"opt\": 10 }";
- let obj: OptionData = super::decode(s).unwrap();
- assert_eq!(obj, OptionData { opt: Some(10) });
- }
-
- #[test]
- fn test_decode_option_malformed() {
- check_err::<OptionData>("{ \"opt\": [] }",
- ExpectedError("Number".to_string(), "[]".to_string()));
- check_err::<OptionData>("{ \"opt\": false }",
- ExpectedError("Number".to_string(), "false".to_string()));
- }
-
- #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
- enum Animal {
- Dog,
- Frog(string::String, isize)
- }
-
- #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
- struct Inner {
- a: (),
- b: usize,
- c: Vec<string::String>,
- }
-
- #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
- struct Outer {
- inner: Vec<Inner>,
- }
-
- fn mk_object(items: &[(string::String, Json)]) -> Json {
- let mut d = BTreeMap::new();
-
- for item in items {
- match *item {
- (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
- }
- };
-
- Object(d)
- }
-
- #[test]
- fn test_from_str_trait() {
- let s = "null";
- assert!(s.parse::<Json>().unwrap() == s.parse().unwrap());
- }
-
- #[test]
- fn test_write_null() {
- assert_eq!(Null.to_string(), "null");
- assert_eq!(Null.pretty().to_string(), "null");
- }
-
- #[test]
- fn test_write_i64() {
- assert_eq!(U64(0).to_string(), "0");
- assert_eq!(U64(0).pretty().to_string(), "0");
-
- assert_eq!(U64(1234).to_string(), "1234");
- assert_eq!(U64(1234).pretty().to_string(), "1234");
-
- assert_eq!(I64(-5678).to_string(), "-5678");
- assert_eq!(I64(-5678).pretty().to_string(), "-5678");
-
- assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000");
- assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000");
- }
-
- #[test]
- fn test_write_f64() {
- assert_eq!(F64(3.0).to_string(), "3.0");
- assert_eq!(F64(3.0).pretty().to_string(), "3.0");
-
- assert_eq!(F64(3.1).to_string(), "3.1");
- assert_eq!(F64(3.1).pretty().to_string(), "3.1");
-
- assert_eq!(F64(-1.5).to_string(), "-1.5");
- assert_eq!(F64(-1.5).pretty().to_string(), "-1.5");
-
- assert_eq!(F64(0.5).to_string(), "0.5");
- assert_eq!(F64(0.5).pretty().to_string(), "0.5");
-
- assert_eq!(F64(f64::NAN).to_string(), "null");
- assert_eq!(F64(f64::NAN).pretty().to_string(), "null");
-
- assert_eq!(F64(f64::INFINITY).to_string(), "null");
- assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null");
-
- assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null");
- assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null");
- }
-
- #[test]
- fn test_write_str() {
- assert_eq!(String("".to_string()).to_string(), "\"\"");
- assert_eq!(String("".to_string()).pretty().to_string(), "\"\"");
-
- assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
- assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\"");
- }
-
- #[test]
- fn test_write_bool() {
- assert_eq!(Boolean(true).to_string(), "true");
- assert_eq!(Boolean(true).pretty().to_string(), "true");
-
- assert_eq!(Boolean(false).to_string(), "false");
- assert_eq!(Boolean(false).pretty().to_string(), "false");
- }
-
- #[test]
- fn test_write_array() {
- assert_eq!(Array(vec![]).to_string(), "[]");
- assert_eq!(Array(vec![]).pretty().to_string(), "[]");
-
- assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]");
- assert_eq!(
- Array(vec![Boolean(true)]).pretty().to_string(),
- "\
- [\n \
- true\n\
- ]"
- );
-
- let long_test_array = Array(vec![
- Boolean(false),
- Null,
- Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
-
- assert_eq!(long_test_array.to_string(),
- "[false,null,[\"foo\\nbar\",3.5]]");
- assert_eq!(
- long_test_array.pretty().to_string(),
- "\
- [\n \
- false,\n \
- null,\n \
- [\n \
- \"foo\\nbar\",\n \
- 3.5\n \
- ]\n\
- ]"
- );
- }
-
- #[test]
- fn test_write_object() {
- assert_eq!(mk_object(&[]).to_string(), "{}");
- assert_eq!(mk_object(&[]).pretty().to_string(), "{}");
-
- assert_eq!(
- mk_object(&[
- ("a".to_string(), Boolean(true))
- ]).to_string(),
- "{\"a\":true}"
- );
- assert_eq!(
- mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(),
- "\
- {\n \
- \"a\": true\n\
- }"
- );
-
- let complex_obj = mk_object(&[
- ("b".to_string(), Array(vec![
- mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
- mk_object(&[("d".to_string(), String("".to_string()))])
- ]))
- ]);
-
- assert_eq!(
- complex_obj.to_string(),
- "{\
- \"b\":[\
- {\"c\":\"\\f\\r\"},\
- {\"d\":\"\"}\
- ]\
- }"
- );
- assert_eq!(
- complex_obj.pretty().to_string(),
- "\
- {\n \
- \"b\": [\n \
- {\n \
- \"c\": \"\\f\\r\"\n \
- },\n \
- {\n \
- \"d\": \"\"\n \
- }\n \
- ]\n\
- }"
- );
-
- let a = mk_object(&[
- ("a".to_string(), Boolean(true)),
- ("b".to_string(), Array(vec![
- mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
- mk_object(&[("d".to_string(), String("".to_string()))])
- ]))
- ]);
-
- // We can't compare the strings directly because the object fields be
- // printed in a different order.
- assert_eq!(a.clone(), a.to_string().parse().unwrap());
- assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap());
- }
-
- #[test]
- fn test_write_enum() {
- let animal = Dog;
- assert_eq!(
- super::as_json(&animal).to_string(),
- "\"Dog\""
- );
- assert_eq!(
- super::as_pretty_json(&animal).to_string(),
- "\"Dog\""
- );
-
- let animal = Frog("Henry".to_string(), 349);
- assert_eq!(
- super::as_json(&animal).to_string(),
- "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
- );
- assert_eq!(
- super::as_pretty_json(&animal).to_string(),
- "{\n \
- \"variant\": \"Frog\",\n \
- \"fields\": [\n \
- \"Henry\",\n \
- 349\n \
- ]\n\
- }"
- );
- }
-
- macro_rules! check_encoder_for_simple {
- ($value:expr, $expected:expr) => ({
- let s = super::as_json(&$value).to_string();
- assert_eq!(s, $expected);
-
- let s = super::as_pretty_json(&$value).to_string();
- assert_eq!(s, $expected);
- })
- }
-
- #[test]
- fn test_write_some() {
- check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\"");
- }
-
- #[test]
- fn test_write_none() {
- check_encoder_for_simple!(None::<string::String>, "null");
- }
-
- #[test]
- fn test_write_char() {
- check_encoder_for_simple!('a', "\"a\"");
- check_encoder_for_simple!('\t', "\"\\t\"");
- check_encoder_for_simple!('\u{0000}', "\"\\u0000\"");
- check_encoder_for_simple!('\u{001b}', "\"\\u001b\"");
- check_encoder_for_simple!('\u{007f}', "\"\\u007f\"");
- check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
- check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
- check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
- }
-
- #[test]
- fn test_trailing_characters() {
- assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5)));
- assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5)));
- assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
- assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2)));
- assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
- assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
- }
-
- #[test]
- fn test_read_identifiers() {
- assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3)));
-
- assert_eq!(from_str("null"), Ok(Null));
- assert_eq!(from_str("true"), Ok(Boolean(true)));
- assert_eq!(from_str("false"), Ok(Boolean(false)));
- assert_eq!(from_str(" null "), Ok(Null));
- assert_eq!(from_str(" true "), Ok(Boolean(true)));
- assert_eq!(from_str(" false "), Ok(Boolean(false)));
- }
-
- #[test]
- fn test_decode_identifiers() {
- let v: () = super::decode("null").unwrap();
- assert_eq!(v, ());
-
- let v: bool = super::decode("true").unwrap();
- assert_eq!(v, true);
-
- let v: bool = super::decode("false").unwrap();
- assert_eq!(v, false);
- }
-
- #[test]
- fn test_read_number() {
- assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1)));
- assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1)));
- assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
- assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2)));
- assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2)));
- assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3)));
- assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3)));
- assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
-
- assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
- assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
-
- assert_eq!(from_str("3"), Ok(U64(3)));
- assert_eq!(from_str("3.1"), Ok(F64(3.1)));
- assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
- assert_eq!(from_str("0.4"), Ok(F64(0.4)));
- assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
- assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
- assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
- assert_eq!(from_str(" 3 "), Ok(U64(3)));
-
- assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
- assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
- assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
- }
-
- #[test]
- fn test_decode_numbers() {
- let v: f64 = super::decode("3").unwrap();
- assert_eq!(v, 3.0);
-
- let v: f64 = super::decode("3.1").unwrap();
- assert_eq!(v, 3.1);
-
- let v: f64 = super::decode("-1.2").unwrap();
- assert_eq!(v, -1.2);
-
- let v: f64 = super::decode("0.4").unwrap();
- assert_eq!(v, 0.4);
-
- let v: f64 = super::decode("0.4e5").unwrap();
- assert_eq!(v, 0.4e5);
-
- let v: f64 = super::decode("0.4e15").unwrap();
- assert_eq!(v, 0.4e15);
-
- let v: f64 = super::decode("0.4e-01").unwrap();
- assert_eq!(v, 0.4e-01);
-
- let v: u64 = super::decode("0").unwrap();
- assert_eq!(v, 0);
-
- let v: u64 = super::decode("18446744073709551615").unwrap();
- assert_eq!(v, u64::MAX);
-
- let v: i64 = super::decode("-9223372036854775808").unwrap();
- assert_eq!(v, i64::MIN);
-
- let v: i64 = super::decode("9223372036854775807").unwrap();
- assert_eq!(v, i64::MAX);
-
- let res: DecodeResult<i64> = super::decode("765.25");
- assert_eq!(res, Err(ExpectedError("Integer".to_string(),
- "765.25".to_string())));
- }
-
- #[test]
- fn test_read_str() {
- assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
- assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
-
- assert_eq!(from_str("\"\""), Ok(String("".to_string())));
- assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
- assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
- assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
- assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
- assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
- assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
- assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
- assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
- assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
- }
-
- #[test]
- fn test_decode_str() {
- let s = [("\"\"", ""),
- ("\"foo\"", "foo"),
- ("\"\\\"\"", "\""),
- ("\"\\b\"", "\x08"),
- ("\"\\n\"", "\n"),
- ("\"\\r\"", "\r"),
- ("\"\\t\"", "\t"),
- ("\"\\u12ab\"", "\u{12ab}"),
- ("\"\\uAB12\"", "\u{AB12}")];
-
- for &(i, o) in &s {
- let v: string::String = super::decode(i).unwrap();
- assert_eq!(v, o);
- }
- }
-
- #[test]
- fn test_read_array() {
- assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
- assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
- assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
- assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
-
- assert_eq!(from_str("[]"), Ok(Array(vec![])));
- assert_eq!(from_str("[ ]"), Ok(Array(vec![])));
- assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)])));
- assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)])));
- assert_eq!(from_str("[null]"), Ok(Array(vec![Null])));
- assert_eq!(from_str("[3, 1]"),
- Ok(Array(vec![U64(3), U64(1)])));
- assert_eq!(from_str("\n[3, 2]\n"),
- Ok(Array(vec![U64(3), U64(2)])));
- assert_eq!(from_str("[2, [4, 1]]"),
- Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
- }
-
- #[test]
- fn test_decode_array() {
- let v: Vec<()> = super::decode("[]").unwrap();
- assert_eq!(v, []);
-
- let v: Vec<()> = super::decode("[null]").unwrap();
- assert_eq!(v, [()]);
-
- let v: Vec<bool> = super::decode("[true]").unwrap();
- assert_eq!(v, [true]);
-
- let v: Vec<isize> = super::decode("[3, 1]").unwrap();
- assert_eq!(v, [3, 1]);
-
- let v: Vec<Vec<usize>> = super::decode("[[3], [1, 2]]").unwrap();
- assert_eq!(v, [vec![3], vec![1, 2]]);
- }
-
- #[test]
- fn test_decode_tuple() {
- let t: (usize, usize, usize) = super::decode("[1, 2, 3]").unwrap();
- assert_eq!(t, (1, 2, 3));
-
- let t: (usize, string::String) = super::decode("[1, \"two\"]").unwrap();
- assert_eq!(t, (1, "two".to_string()));
- }
-
- #[test]
- fn test_decode_tuple_malformed_types() {
- assert!(super::decode::<(usize, string::String)>("[1, 2]").is_err());
- }
-
- #[test]
- fn test_decode_tuple_malformed_length() {
- assert!(super::decode::<(usize, usize)>("[1, 2, 3]").is_err());
- }
-
- #[test]
- fn test_read_object() {
- assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
- assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
- assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2)));
- assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
- assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
- assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
-
- assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6)));
- assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6)));
- assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
- assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8)));
- assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8)));
-
- assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
- assert_eq!(from_str("{\"a\": 3}").unwrap(),
- mk_object(&[("a".to_string(), U64(3))]));
-
- assert_eq!(from_str(
- "{ \"a\": null, \"b\" : true }").unwrap(),
- mk_object(&[
- ("a".to_string(), Null),
- ("b".to_string(), Boolean(true))]));
- assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
- mk_object(&[
- ("a".to_string(), Null),
- ("b".to_string(), Boolean(true))]));
- assert_eq!(from_str(
- "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
- mk_object(&[
- ("a".to_string(), F64(1.0)),
- ("b".to_string(), Array(vec![Boolean(true)]))
- ]));
- assert_eq!(from_str(
- "{\
- \"a\": 1.0, \
- \"b\": [\
- true,\
- \"foo\\nbar\", \
- { \"c\": {\"d\": null} } \
- ]\
- }").unwrap(),
- mk_object(&[
- ("a".to_string(), F64(1.0)),
- ("b".to_string(), Array(vec![
- Boolean(true),
- String("foo\nbar".to_string()),
- mk_object(&[
- ("c".to_string(), mk_object(&[("d".to_string(), Null)]))
- ])
- ]))
- ]));
- }
-
- #[test]
- fn test_decode_struct() {
- let s = "{
- \"inner\": [
- { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] }
- ]
- }";
-
- let v: Outer = super::decode(s).unwrap();
- assert_eq!(
- v,
- Outer {
- inner: vec![
- Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }
- ]
- }
- );
- }
-
- #[derive(RustcDecodable)]
- struct FloatStruct {
- f: f64,
- a: Vec<f64>
- }
- #[test]
- fn test_decode_struct_with_nan() {
- let s = "{\"f\":null,\"a\":[null,123]}";
- let obj: FloatStruct = super::decode(s).unwrap();
- assert!(obj.f.is_nan());
- assert!(obj.a[0].is_nan());
- assert_eq!(obj.a[1], 123f64);
- }
-
- #[test]
- fn test_decode_option() {
- let value: Option<string::String> = super::decode("null").unwrap();
- assert_eq!(value, None);
-
- let value: Option<string::String> = super::decode("\"jodhpurs\"").unwrap();
- assert_eq!(value, Some("jodhpurs".to_string()));
- }
-
- #[test]
- fn test_decode_enum() {
- let value: Animal = super::decode("\"Dog\"").unwrap();
- assert_eq!(value, Dog);
-
- let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
- let value: Animal = super::decode(s).unwrap();
- assert_eq!(value, Frog("Henry".to_string(), 349));
- }
-
- #[test]
- fn test_decode_map() {
- let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\
- \"fields\":[\"Henry\", 349]}}";
- let mut map: BTreeMap<string::String, Animal> = super::decode(s).unwrap();
-
- assert_eq!(map.remove(&"a".to_string()), Some(Dog));
- assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
- }
-
- #[test]
- fn test_multiline_errors() {
- assert_eq!(from_str("{\n \"foo\":\n \"bar\""),
- Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
- }
-
- #[derive(RustcDecodable)]
- #[allow(dead_code)]
- struct DecodeStruct {
- x: f64,
- y: bool,
- z: string::String,
- w: Vec<DecodeStruct>
- }
- #[derive(RustcDecodable)]
- enum DecodeEnum {
- A(f64),
- B(string::String)
- }
- fn check_err<T: Decodable>(to_parse: &'static str, expected: DecoderError) {
- let res: DecodeResult<T> = match from_str(to_parse) {
- Err(e) => Err(ParseError(e)),
- Ok(json) => Decodable::decode(&mut Decoder::new(json))
- };
- match res {
- Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`",
- to_parse, expected),
- Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}",
- to_parse, e),
- Err(e) => {
- assert_eq!(e, expected);
- }
- }
- }
- #[test]
- fn test_decode_errors_struct() {
- check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string()));
- check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
- ExpectedError("Number".to_string(), "true".to_string()));
- check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
- ExpectedError("Boolean".to_string(), "[]".to_string()));
- check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
- ExpectedError("String".to_string(), "{}".to_string()));
- check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
- ExpectedError("Array".to_string(), "null".to_string()));
- check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}",
- MissingFieldError("w".to_string()));
- }
- #[test]
- fn test_decode_errors_enum() {
- check_err::<DecodeEnum>("{}",
- MissingFieldError("variant".to_string()));
- check_err::<DecodeEnum>("{\"variant\": 1}",
- ExpectedError("String".to_string(), "1".to_string()));
- check_err::<DecodeEnum>("{\"variant\": \"A\"}",
- MissingFieldError("fields".to_string()));
- check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}",
- ExpectedError("Array".to_string(), "null".to_string()));
- check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}",
- UnknownVariantError("C".to_string()));
- }
-
- #[test]
- fn test_find(){
- let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
- let found_str = json_value.find("dog");
- assert!(found_str.unwrap().as_string().unwrap() == "cat");
- }
-
- #[test]
- fn test_find_path(){
- let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
- let found_str = json_value.find_path(&["dog", "cat", "mouse"]);
- assert!(found_str.unwrap().as_string().unwrap() == "cheese");
- }
-
- #[test]
- fn test_search(){
- let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
- let found_str = json_value.search("mouse").and_then(|j| j.as_string());
- assert!(found_str.unwrap() == "cheese");
- }
-
- #[test]
- fn test_index(){
- let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap();
- let ref array = json_value["animals"];
- assert_eq!(array[0].as_string().unwrap(), "dog");
- assert_eq!(array[1].as_string().unwrap(), "cat");
- assert_eq!(array[2].as_string().unwrap(), "mouse");
- }
-
- #[test]
- fn test_is_object(){
- let json_value = from_str("{}").unwrap();
- assert!(json_value.is_object());
- }
-
- #[test]
- fn test_as_object(){
- let json_value = from_str("{}").unwrap();
- let json_object = json_value.as_object();
- assert!(json_object.is_some());
- }
-
- #[test]
- fn test_is_array(){
- let json_value = from_str("[1, 2, 3]").unwrap();
- assert!(json_value.is_array());
- }
-
- #[test]
- fn test_as_array(){
- let json_value = from_str("[1, 2, 3]").unwrap();
- let json_array = json_value.as_array();
- let expected_length = 3;
- assert!(json_array.is_some() && json_array.unwrap().len() == expected_length);
- }
-
- #[test]
- fn test_is_string(){
- let json_value = from_str("\"dog\"").unwrap();
- assert!(json_value.is_string());
- }
-
- #[test]
- fn test_as_string(){
- let json_value = from_str("\"dog\"").unwrap();
- let json_str = json_value.as_string();
- let expected_str = "dog";
- assert_eq!(json_str, Some(expected_str));
- }
-
- #[test]
- fn test_is_number(){
- let json_value = from_str("12").unwrap();
- assert!(json_value.is_number());
- }
-
- #[test]
- fn test_is_i64(){
- let json_value = from_str("-12").unwrap();
- assert!(json_value.is_i64());
-
- let json_value = from_str("12").unwrap();
- assert!(!json_value.is_i64());
-
- let json_value = from_str("12.0").unwrap();
- assert!(!json_value.is_i64());
- }
-
- #[test]
- fn test_is_u64(){
- let json_value = from_str("12").unwrap();
- assert!(json_value.is_u64());
-
- let json_value = from_str("-12").unwrap();
- assert!(!json_value.is_u64());
-
- let json_value = from_str("12.0").unwrap();
- assert!(!json_value.is_u64());
- }
-
- #[test]
- fn test_is_f64(){
- let json_value = from_str("12").unwrap();
- assert!(!json_value.is_f64());
-
- let json_value = from_str("-12").unwrap();
- assert!(!json_value.is_f64());
-
- let json_value = from_str("12.0").unwrap();
- assert!(json_value.is_f64());
-
- let json_value = from_str("-12.0").unwrap();
- assert!(json_value.is_f64());
- }
-
- #[test]
- fn test_as_i64(){
- let json_value = from_str("-12").unwrap();
- let json_num = json_value.as_i64();
- assert_eq!(json_num, Some(-12));
- }
-
- #[test]
- fn test_as_u64(){
- let json_value = from_str("12").unwrap();
- let json_num = json_value.as_u64();
- assert_eq!(json_num, Some(12));
- }
-
- #[test]
- fn test_as_f64(){
- let json_value = from_str("12.0").unwrap();
- let json_num = json_value.as_f64();
- assert_eq!(json_num, Some(12f64));
- }
-
- #[test]
- fn test_is_boolean(){
- let json_value = from_str("false").unwrap();
- assert!(json_value.is_boolean());
- }
-
- #[test]
- fn test_as_boolean(){
- let json_value = from_str("false").unwrap();
- let json_bool = json_value.as_boolean();
- let expected_bool = false;
- assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool);
- }
-
- #[test]
- fn test_is_null(){
- let json_value = from_str("null").unwrap();
- assert!(json_value.is_null());
- }
-
- #[test]
- fn test_as_null(){
- let json_value = from_str("null").unwrap();
- let json_null = json_value.as_null();
- let expected_null = ();
- assert!(json_null.is_some() && json_null.unwrap() == expected_null);
- }
-
- #[test]
- fn test_encode_hashmap_with_numeric_key() {
- use std::str::from_utf8;
- use std::collections::HashMap;
- let mut hm: HashMap<usize, bool> = HashMap::new();
- hm.insert(1, true);
- let mut mem_buf = Vec::new();
- write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
- let json_str = from_utf8(&mem_buf[..]).unwrap();
- match from_str(json_str) {
- Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
- _ => {} // it parsed and we are good to go
- }
- }
-
- #[test]
- fn test_prettyencode_hashmap_with_numeric_key() {
- use std::str::from_utf8;
- use std::collections::HashMap;
- let mut hm: HashMap<usize, bool> = HashMap::new();
- hm.insert(1, true);
- let mut mem_buf = Vec::new();
- write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
- let json_str = from_utf8(&mem_buf[..]).unwrap();
- match from_str(json_str) {
- Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
- _ => {} // it parsed and we are good to go
- }
- }
-
- #[test]
- fn test_prettyencoder_indent_level_param() {
- use std::str::from_utf8;
- use std::collections::BTreeMap;
-
- let mut tree = BTreeMap::new();
-
- tree.insert("hello".to_string(), String("guten tag".to_string()));
- tree.insert("goodbye".to_string(), String("sayonara".to_string()));
-
- let json = Array(
- // The following layout below should look a lot like
- // the pretty-printed JSON (indent * x)
- vec!
- ( // 0x
- String("greetings".to_string()), // 1x
- Object(tree), // 1x + 2x + 2x + 1x
- ) // 0x
- // End JSON array (7 lines)
- );
-
- // Helper function for counting indents
- fn indents(source: &str) -> usize {
- let trimmed = source.trim_start_matches(' ');
- source.len() - trimmed.len()
- }
-
- // Test up to 4 spaces of indents (more?)
- for i in 0..4 {
- let mut writer = Vec::new();
- write!(&mut writer, "{}",
- super::as_pretty_json(&json).indent(i)).unwrap();
-
- let printed = from_utf8(&writer[..]).unwrap();
-
- // Check for indents at each line
- let lines: Vec<&str> = printed.lines().collect();
- assert_eq!(lines.len(), 7); // JSON should be 7 lines
-
- assert_eq!(indents(lines[0]), 0 * i); // [
- assert_eq!(indents(lines[1]), 1 * i); // "greetings",
- assert_eq!(indents(lines[2]), 1 * i); // {
- assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag",
- assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara"
- assert_eq!(indents(lines[5]), 1 * i); // },
- assert_eq!(indents(lines[6]), 0 * i); // ]
-
- // Finally, test that the pretty-printed JSON is valid
- from_str(printed).ok().expect("Pretty-printed JSON is invalid!");
- }
- }
-
- #[test]
- fn test_hashmap_with_enum_key() {
- use std::collections::HashMap;
- use json;
- #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)]
- enum Enum {
- Foo,
- #[allow(dead_code)]
- Bar,
- }
- let mut map = HashMap::new();
- map.insert(Enum::Foo, 0);
- let result = json::encode(&map).unwrap();
- assert_eq!(&result[..], r#"{"Foo":0}"#);
- let decoded: HashMap<Enum, _> = json::decode(&result).unwrap();
- assert_eq!(map, decoded);
- }
-
- #[test]
- fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
- use std::collections::HashMap;
- use Decodable;
- let json_str = "{\"1\":true}";
- let json_obj = match from_str(json_str) {
- Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
- Ok(o) => o
- };
- let mut decoder = Decoder::new(json_obj);
- let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder).unwrap();
- }
-
- #[test]
- fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
- use std::collections::HashMap;
- use Decodable;
- let json_str = "{\"a\":true}";
- let json_obj = match from_str(json_str) {
- Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
- Ok(o) => o
- };
- let mut decoder = Decoder::new(json_obj);
- let result: Result<HashMap<usize, bool>, DecoderError> = Decodable::decode(&mut decoder);
- assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
- }
-
- fn assert_stream_equal(src: &str,
- expected: Vec<(JsonEvent, Vec<StackElement>)>) {
- let mut parser = Parser::new(src.chars());
- let mut i = 0;
- loop {
- let evt = match parser.next() {
- Some(e) => e,
- None => { break; }
- };
- let (ref expected_evt, ref expected_stack) = expected[i];
- if !parser.stack().is_equal_to(expected_stack) {
- panic!("Parser stack is not equal to {:?}", expected_stack);
- }
- assert_eq!(&evt, expected_evt);
- i+=1;
- }
- }
- #[test]
- fn test_streaming_parser() {
- assert_stream_equal(
- r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
- vec![
- (ObjectStart, vec![]),
- (StringValue("bar".to_string()), vec![StackElement::Key("foo")]),
- (ArrayStart, vec![StackElement::Key("array")]),
- (U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]),
- (U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]),
- (U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]),
- (U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]),
- (U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]),
- (U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]),
- (ArrayEnd, vec![StackElement::Key("array")]),
- (ArrayStart, vec![StackElement::Key("idents")]),
- (NullValue, vec![StackElement::Key("idents"),
- StackElement::Index(0)]),
- (BooleanValue(true), vec![StackElement::Key("idents"),
- StackElement::Index(1)]),
- (BooleanValue(false), vec![StackElement::Key("idents"),
- StackElement::Index(2)]),
- (ArrayEnd, vec![StackElement::Key("idents")]),
- (ObjectEnd, vec![]),
- ]
- );
- }
- fn last_event(src: &str) -> JsonEvent {
- let mut parser = Parser::new(src.chars());
- let mut evt = NullValue;
- loop {
- evt = match parser.next() {
- Some(e) => e,
- None => return evt,
- }
- }
- }
-
- #[test]
- fn test_read_object_streaming() {
- assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
- assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2)));
- assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
- assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
- assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
-
- assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6)));
- assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6)));
- assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
- assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8)));
- assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
- assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8)));
-
- assert_stream_equal(
- "{}",
- vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]
- );
- assert_stream_equal(
- "{\"a\": 3}",
- vec![
- (ObjectStart, vec![]),
- (U64Value(3), vec![StackElement::Key("a")]),
- (ObjectEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "{ \"a\": null, \"b\" : true }",
- vec![
- (ObjectStart, vec![]),
- (NullValue, vec![StackElement::Key("a")]),
- (BooleanValue(true), vec![StackElement::Key("b")]),
- (ObjectEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "{\"a\" : 1.0 ,\"b\": [ true ]}",
- vec![
- (ObjectStart, vec![]),
- (F64Value(1.0), vec![StackElement::Key("a")]),
- (ArrayStart, vec![StackElement::Key("b")]),
- (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]),
- (ArrayEnd, vec![StackElement::Key("b")]),
- (ObjectEnd, vec![]),
- ]
- );
- assert_stream_equal(
- r#"{
- "a": 1.0,
- "b": [
- true,
- "foo\nbar",
- { "c": {"d": null} }
- ]
- }"#,
- vec![
- (ObjectStart, vec![]),
- (F64Value(1.0), vec![StackElement::Key("a")]),
- (ArrayStart, vec![StackElement::Key("b")]),
- (BooleanValue(true), vec![StackElement::Key("b"),
- StackElement::Index(0)]),
- (StringValue("foo\nbar".to_string()), vec![StackElement::Key("b"),
- StackElement::Index(1)]),
- (ObjectStart, vec![StackElement::Key("b"),
- StackElement::Index(2)]),
- (ObjectStart, vec![StackElement::Key("b"),
- StackElement::Index(2),
- StackElement::Key("c")]),
- (NullValue, vec![StackElement::Key("b"),
- StackElement::Index(2),
- StackElement::Key("c"),
- StackElement::Key("d")]),
- (ObjectEnd, vec![StackElement::Key("b"),
- StackElement::Index(2),
- StackElement::Key("c")]),
- (ObjectEnd, vec![StackElement::Key("b"),
- StackElement::Index(2)]),
- (ArrayEnd, vec![StackElement::Key("b")]),
- (ObjectEnd, vec![]),
- ]
- );
- }
- #[test]
- fn test_read_array_streaming() {
- assert_stream_equal(
- "[]",
- vec![
- (ArrayStart, vec![]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[ ]",
- vec![
- (ArrayStart, vec![]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[true]",
- vec![
- (ArrayStart, vec![]),
- (BooleanValue(true), vec![StackElement::Index(0)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[ false ]",
- vec![
- (ArrayStart, vec![]),
- (BooleanValue(false), vec![StackElement::Index(0)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[null]",
- vec![
- (ArrayStart, vec![]),
- (NullValue, vec![StackElement::Index(0)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[3, 1]",
- vec![
- (ArrayStart, vec![]),
- (U64Value(3), vec![StackElement::Index(0)]),
- (U64Value(1), vec![StackElement::Index(1)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "\n[3, 2]\n",
- vec![
- (ArrayStart, vec![]),
- (U64Value(3), vec![StackElement::Index(0)]),
- (U64Value(2), vec![StackElement::Index(1)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[2, [4, 1]]",
- vec![
- (ArrayStart, vec![]),
- (U64Value(2), vec![StackElement::Index(0)]),
- (ArrayStart, vec![StackElement::Index(1)]),
- (U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]),
- (U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]),
- (ArrayEnd, vec![StackElement::Index(1)]),
- (ArrayEnd, vec![]),
- ]
- );
-
- assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2)));
-
- assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
- assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
- assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
- assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
-
- }
- #[test]
- fn test_trailing_characters_streaming() {
- assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5)));
- assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5)));
- assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6)));
- assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2)));
- assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
- assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
- }
- #[test]
- fn test_read_identifiers_streaming() {
- assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
- assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
- assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));
-
- assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3)));
- }
-
#[test]
fn test_stack() {
let mut stack = Stack::new();
@@ -3862,76 +2654,6 @@
assert!(stack.get(1) == StackElement::Key("foo"));
}
- #[test]
- fn test_to_json() {
- use std::collections::{HashMap,BTreeMap};
- use super::ToJson;
-
- let array2 = Array(vec![U64(1), U64(2)]);
- let array3 = Array(vec![U64(1), U64(2), U64(3)]);
- let object = {
- let mut tree_map = BTreeMap::new();
- tree_map.insert("a".to_string(), U64(1));
- tree_map.insert("b".to_string(), U64(2));
- Object(tree_map)
- };
-
- assert_eq!(array2.to_json(), array2);
- assert_eq!(object.to_json(), object);
- assert_eq!(3_isize.to_json(), I64(3));
- assert_eq!(4_i8.to_json(), I64(4));
- assert_eq!(5_i16.to_json(), I64(5));
- assert_eq!(6_i32.to_json(), I64(6));
- assert_eq!(7_i64.to_json(), I64(7));
- assert_eq!(8_usize.to_json(), U64(8));
- assert_eq!(9_u8.to_json(), U64(9));
- assert_eq!(10_u16.to_json(), U64(10));
- assert_eq!(11_u32.to_json(), U64(11));
- assert_eq!(12_u64.to_json(), U64(12));
- assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
- assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
- assert_eq!(().to_json(), Null);
- assert_eq!(f32::INFINITY.to_json(), Null);
- assert_eq!(f64::NAN.to_json(), Null);
- assert_eq!(true.to_json(), Boolean(true));
- assert_eq!(false.to_json(), Boolean(false));
- assert_eq!("abc".to_json(), String("abc".to_string()));
- assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
- assert_eq!((1_usize, 2_usize).to_json(), array2);
- assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3);
- assert_eq!([1_usize, 2_usize].to_json(), array2);
- assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3);
- assert_eq!((vec![1_usize, 2_usize]).to_json(), array2);
- assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3);
- let mut tree_map = BTreeMap::new();
- tree_map.insert("a".to_string(), 1 as usize);
- tree_map.insert("b".to_string(), 2);
- assert_eq!(tree_map.to_json(), object);
- let mut hash_map = HashMap::new();
- hash_map.insert("a".to_string(), 1 as usize);
- hash_map.insert("b".to_string(), 2);
- assert_eq!(hash_map.to_json(), object);
- assert_eq!(Some(15).to_json(), I64(15));
- assert_eq!(Some(15 as usize).to_json(), U64(15));
- assert_eq!(None::<isize>.to_json(), Null);
- }
-
- #[test]
- fn test_encode_hashmap_with_arbitrary_key() {
- use std::collections::HashMap;
- #[derive(PartialEq, Eq, Hash, RustcEncodable)]
- struct ArbitraryType(usize);
- let mut hm: HashMap<ArbitraryType, bool> = HashMap::new();
- hm.insert(ArbitraryType(1), true);
- let mut mem_buf = string::String::new();
- let mut encoder = Encoder::new(&mut mem_buf);
- let result = hm.encode(&mut encoder);
- match result.unwrap_err() {
- EncoderError::BadHashmapKey => (),
- _ => panic!("expected bad hash map key")
- }
- }
-
#[bench]
fn bench_streaming_small(b: &mut Bencher) {
b.iter( || {
diff --git a/src/libserialize/lib.rs b/src/libserialize/lib.rs
index fe93a2d..b8eeb4d 100644
--- a/src/libserialize/lib.rs
+++ b/src/libserialize/lib.rs
@@ -8,6 +8,8 @@
html_playground_url = "https://play.rust-lang.org/",
test(attr(allow(unused_variables), deny(warnings))))]
+#![deny(rust_2018_idioms)]
+
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(specialization)]
@@ -20,8 +22,6 @@
pub use self::serialize::{SpecializationError, SpecializedEncoder, SpecializedDecoder};
pub use self::serialize::{UseSpecializedEncodable, UseSpecializedDecodable};
-extern crate smallvec;
-
mod serialize;
mod collection_impls;
@@ -30,7 +30,3 @@
pub mod opaque;
pub mod leb128;
-
-mod rustc_serialize {
- pub use serialize::*;
-}
diff --git a/src/libserialize/opaque.rs b/src/libserialize/opaque.rs
index b8d4f8a..a6a5c31 100644
--- a/src/libserialize/opaque.rs
+++ b/src/libserialize/opaque.rs
@@ -1,6 +1,6 @@
-use leb128::{self, read_signed_leb128, write_signed_leb128};
+use crate::leb128::{self, read_signed_leb128, write_signed_leb128};
+use crate::serialize;
use std::borrow::Cow;
-use serialize;
// -----------------------------------------------------------------------------
// Encoder
@@ -312,7 +312,7 @@
}
#[inline]
- fn read_str(&mut self) -> Result<Cow<str>, Self::Error> {
+ fn read_str(&mut self) -> Result<Cow<'_, str>, Self::Error> {
let len = self.read_usize()?;
let s = ::std::str::from_utf8(&self.data[self.position..self.position + len]).unwrap();
self.position += len;
@@ -324,288 +324,3 @@
err.to_string()
}
}
-
-
-#[cfg(test)]
-mod tests {
- use serialize::{Encodable, Decodable};
- use std::fmt::Debug;
- use super::{Encoder, Decoder};
-
- #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
- struct Struct {
- a: (),
- b: u8,
- c: u16,
- d: u32,
- e: u64,
- f: usize,
-
- g: i8,
- h: i16,
- i: i32,
- j: i64,
- k: isize,
-
- l: char,
- m: String,
- n: f32,
- o: f64,
- p: bool,
- q: Option<u32>,
- }
-
-
- fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
- let mut encoder = Encoder::new(Vec::new());
-
- for value in &values {
- Encodable::encode(&value, &mut encoder).unwrap();
- }
-
- let data = encoder.into_inner();
- let mut decoder = Decoder::new(&data[..], 0);
-
- for value in values {
- let decoded = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(value, decoded);
- }
- }
-
- #[test]
- fn test_unit() {
- check_round_trip(vec![(), (), (), ()]);
- }
-
- #[test]
- fn test_u8() {
- let mut vec = vec![];
- for i in ::std::u8::MIN..::std::u8::MAX {
- vec.push(i);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_u16() {
- for i in ::std::u16::MIN..::std::u16::MAX {
- check_round_trip(vec![1, 2, 3, i, i, i]);
- }
- }
-
- #[test]
- fn test_u32() {
- check_round_trip(vec![1, 2, 3, ::std::u32::MIN, 0, 1, ::std::u32::MAX, 2, 1]);
- }
-
- #[test]
- fn test_u64() {
- check_round_trip(vec![1, 2, 3, ::std::u64::MIN, 0, 1, ::std::u64::MAX, 2, 1]);
- }
-
- #[test]
- fn test_usize() {
- check_round_trip(vec![1, 2, 3, ::std::usize::MIN, 0, 1, ::std::usize::MAX, 2, 1]);
- }
-
- #[test]
- fn test_i8() {
- let mut vec = vec![];
- for i in ::std::i8::MIN..::std::i8::MAX {
- vec.push(i);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_i16() {
- for i in ::std::i16::MIN..::std::i16::MAX {
- check_round_trip(vec![-1, 2, -3, i, i, i, 2]);
- }
- }
-
- #[test]
- fn test_i32() {
- check_round_trip(vec![-1, 2, -3, ::std::i32::MIN, 0, 1, ::std::i32::MAX, 2, 1]);
- }
-
- #[test]
- fn test_i64() {
- check_round_trip(vec![-1, 2, -3, ::std::i64::MIN, 0, 1, ::std::i64::MAX, 2, 1]);
- }
-
- #[test]
- fn test_isize() {
- check_round_trip(vec![-1, 2, -3, ::std::isize::MIN, 0, 1, ::std::isize::MAX, 2, 1]);
- }
-
- #[test]
- fn test_bool() {
- check_round_trip(vec![false, true, true, false, false]);
- }
-
- #[test]
- fn test_f32() {
- let mut vec = vec![];
- for i in -100..100 {
- vec.push((i as f32) / 3.0);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_f64() {
- let mut vec = vec![];
- for i in -100..100 {
- vec.push((i as f64) / 3.0);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_char() {
- let vec = vec!['a', 'b', 'c', 'd', 'A', 'X', ' ', '#', 'Ö', 'Ä', 'µ', '€'];
- check_round_trip(vec);
- }
-
- #[test]
- fn test_string() {
- let vec = vec!["abcbuÖeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmeabpnvapeapmaebn".to_string(),
- "abcbuÖganeiÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µnvapeapmaebn".to_string()];
-
- check_round_trip(vec);
- }
-
- #[test]
- fn test_option() {
- check_round_trip(vec![Some(-1i8)]);
- check_round_trip(vec![Some(-2i16)]);
- check_round_trip(vec![Some(-3i32)]);
- check_round_trip(vec![Some(-4i64)]);
- check_round_trip(vec![Some(-5isize)]);
-
- let none_i8: Option<i8> = None;
- check_round_trip(vec![none_i8]);
-
- let none_i16: Option<i16> = None;
- check_round_trip(vec![none_i16]);
-
- let none_i32: Option<i32> = None;
- check_round_trip(vec![none_i32]);
-
- let none_i64: Option<i64> = None;
- check_round_trip(vec![none_i64]);
-
- let none_isize: Option<isize> = None;
- check_round_trip(vec![none_isize]);
- }
-
- #[test]
- fn test_struct() {
- check_round_trip(vec![Struct {
- a: (),
- b: 10,
- c: 11,
- d: 12,
- e: 13,
- f: 14,
-
- g: 15,
- h: 16,
- i: 17,
- j: 18,
- k: 19,
-
- l: 'x',
- m: "abc".to_string(),
- n: 20.5,
- o: 21.5,
- p: false,
- q: None,
- }]);
-
- check_round_trip(vec![Struct {
- a: (),
- b: 101,
- c: 111,
- d: 121,
- e: 131,
- f: 141,
-
- g: -15,
- h: -16,
- i: -17,
- j: -18,
- k: -19,
-
- l: 'y',
- m: "def".to_string(),
- n: -20.5,
- o: -21.5,
- p: true,
- q: Some(1234567),
- }]);
- }
-
- #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
- enum Enum {
- Variant1,
- Variant2(usize, f32),
- Variant3 {
- a: i32,
- b: char,
- c: bool,
- },
- }
-
- #[test]
- fn test_enum() {
- check_round_trip(vec![Enum::Variant1,
- Enum::Variant2(1, 2.5),
- Enum::Variant3 {
- a: 3,
- b: 'b',
- c: false,
- },
- Enum::Variant3 {
- a: -4,
- b: 'f',
- c: true,
- }]);
- }
-
- #[test]
- fn test_sequence() {
- let mut vec = vec![];
- for i in -100i64..100i64 {
- vec.push(i * 100000);
- }
-
- check_round_trip(vec![vec]);
- }
-
- #[test]
- fn test_hash_map() {
- use std::collections::HashMap;
- let mut map = HashMap::new();
- for i in -100i64..100i64 {
- map.insert(i * 100000, i * 10000);
- }
-
- check_round_trip(vec![map]);
- }
-
- #[test]
- fn test_tuples() {
- check_round_trip(vec![('x', (), false, 0.5f32)]);
- check_round_trip(vec![(9i8, 10u16, 1.5f64)]);
- check_round_trip(vec![(-12i16, 11u8, 12usize)]);
- check_round_trip(vec![(1234567isize, 100000000000000u64, 99999999999999i64)]);
- check_round_trip(vec![(String::new(), "some string".to_string())]);
- }
-}
diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs
index 03844b3..977a36a 100644
--- a/src/libserialize/serialize.rs
+++ b/src/libserialize/serialize.rs
@@ -175,7 +175,7 @@
fn read_f64(&mut self) -> Result<f64, Self::Error>;
fn read_f32(&mut self) -> Result<f32, Self::Error>;
fn read_char(&mut self) -> Result<char, Self::Error>;
- fn read_str(&mut self) -> Result<Cow<str>, Self::Error>;
+ fn read_str(&mut self) -> Result<Cow<'_, str>, Self::Error>;
// Compound types:
fn read_enum<T, F>(&mut self, _name: &str, f: F) -> Result<T, Self::Error>
diff --git a/src/libserialize/tests/json.rs b/src/libserialize/tests/json.rs
new file mode 100644
index 0000000..3fb6bda
--- /dev/null
+++ b/src/libserialize/tests/json.rs
@@ -0,0 +1,1282 @@
+extern crate serialize as rustc_serialize;
+
+use rustc_serialize::{Encodable, Decodable};
+use rustc_serialize::json;
+use json::Json::*;
+use json::ErrorCode::*;
+use json::ParserError::*;
+use json::DecoderError::*;
+use json::JsonEvent::*;
+use json::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser, StackElement,
+ Decoder, Encoder, EncoderError};
+
+use Animal::*;
+use std::{i64, u64, f32, f64};
+use std::io::prelude::*;
+use std::collections::BTreeMap;
+use std::string;
+
+#[derive(RustcDecodable, Eq, PartialEq, Debug)]
+struct OptionData {
+ opt: Option<usize>,
+}
+
+#[test]
+fn test_decode_option_none() {
+ let s ="{}";
+ let obj: OptionData = json::decode(s).unwrap();
+ assert_eq!(obj, OptionData { opt: None });
+}
+
+#[test]
+fn test_decode_option_some() {
+ let s = "{ \"opt\": 10 }";
+ let obj: OptionData = json::decode(s).unwrap();
+ assert_eq!(obj, OptionData { opt: Some(10) });
+}
+
+#[test]
+fn test_decode_option_malformed() {
+ check_err::<OptionData>("{ \"opt\": [] }",
+ ExpectedError("Number".to_string(), "[]".to_string()));
+ check_err::<OptionData>("{ \"opt\": false }",
+ ExpectedError("Number".to_string(), "false".to_string()));
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+enum Animal {
+ Dog,
+ Frog(string::String, isize)
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct Inner {
+ a: (),
+ b: usize,
+ c: Vec<string::String>,
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct Outer {
+ inner: Vec<Inner>,
+}
+
+fn mk_object(items: &[(string::String, Json)]) -> Json {
+ let mut d = BTreeMap::new();
+
+ for item in items {
+ match *item {
+ (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
+ }
+ };
+
+ Object(d)
+}
+
+#[test]
+fn test_from_str_trait() {
+ let s = "null";
+ assert!(s.parse::<Json>().unwrap() == s.parse().unwrap());
+}
+
+#[test]
+fn test_write_null() {
+ assert_eq!(Null.to_string(), "null");
+ assert_eq!(Null.pretty().to_string(), "null");
+}
+
+#[test]
+fn test_write_i64() {
+ assert_eq!(U64(0).to_string(), "0");
+ assert_eq!(U64(0).pretty().to_string(), "0");
+
+ assert_eq!(U64(1234).to_string(), "1234");
+ assert_eq!(U64(1234).pretty().to_string(), "1234");
+
+ assert_eq!(I64(-5678).to_string(), "-5678");
+ assert_eq!(I64(-5678).pretty().to_string(), "-5678");
+
+ assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000");
+ assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000");
+}
+
+#[test]
+fn test_write_f64() {
+ assert_eq!(F64(3.0).to_string(), "3.0");
+ assert_eq!(F64(3.0).pretty().to_string(), "3.0");
+
+ assert_eq!(F64(3.1).to_string(), "3.1");
+ assert_eq!(F64(3.1).pretty().to_string(), "3.1");
+
+ assert_eq!(F64(-1.5).to_string(), "-1.5");
+ assert_eq!(F64(-1.5).pretty().to_string(), "-1.5");
+
+ assert_eq!(F64(0.5).to_string(), "0.5");
+ assert_eq!(F64(0.5).pretty().to_string(), "0.5");
+
+ assert_eq!(F64(f64::NAN).to_string(), "null");
+ assert_eq!(F64(f64::NAN).pretty().to_string(), "null");
+
+ assert_eq!(F64(f64::INFINITY).to_string(), "null");
+ assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null");
+
+ assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null");
+ assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null");
+}
+
+#[test]
+fn test_write_str() {
+ assert_eq!(String("".to_string()).to_string(), "\"\"");
+ assert_eq!(String("".to_string()).pretty().to_string(), "\"\"");
+
+ assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
+ assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\"");
+}
+
+#[test]
+fn test_write_bool() {
+ assert_eq!(Boolean(true).to_string(), "true");
+ assert_eq!(Boolean(true).pretty().to_string(), "true");
+
+ assert_eq!(Boolean(false).to_string(), "false");
+ assert_eq!(Boolean(false).pretty().to_string(), "false");
+}
+
+#[test]
+fn test_write_array() {
+ assert_eq!(Array(vec![]).to_string(), "[]");
+ assert_eq!(Array(vec![]).pretty().to_string(), "[]");
+
+ assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]");
+ assert_eq!(
+ Array(vec![Boolean(true)]).pretty().to_string(),
+ "\
+ [\n \
+ true\n\
+ ]"
+ );
+
+ let long_test_array = Array(vec![
+ Boolean(false),
+ Null,
+ Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
+
+ assert_eq!(long_test_array.to_string(),
+ "[false,null,[\"foo\\nbar\",3.5]]");
+ assert_eq!(
+ long_test_array.pretty().to_string(),
+ "\
+ [\n \
+ false,\n \
+ null,\n \
+ [\n \
+ \"foo\\nbar\",\n \
+ 3.5\n \
+ ]\n\
+ ]"
+ );
+}
+
+#[test]
+fn test_write_object() {
+ assert_eq!(mk_object(&[]).to_string(), "{}");
+ assert_eq!(mk_object(&[]).pretty().to_string(), "{}");
+
+ assert_eq!(
+ mk_object(&[
+ ("a".to_string(), Boolean(true))
+ ]).to_string(),
+ "{\"a\":true}"
+ );
+ assert_eq!(
+ mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(),
+ "\
+ {\n \
+ \"a\": true\n\
+ }"
+ );
+
+ let complex_obj = mk_object(&[
+ ("b".to_string(), Array(vec![
+ mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+ mk_object(&[("d".to_string(), String("".to_string()))])
+ ]))
+ ]);
+
+ assert_eq!(
+ complex_obj.to_string(),
+ "{\
+ \"b\":[\
+ {\"c\":\"\\f\\r\"},\
+ {\"d\":\"\"}\
+ ]\
+ }"
+ );
+ assert_eq!(
+ complex_obj.pretty().to_string(),
+ "\
+ {\n \
+ \"b\": [\n \
+ {\n \
+ \"c\": \"\\f\\r\"\n \
+ },\n \
+ {\n \
+ \"d\": \"\"\n \
+ }\n \
+ ]\n\
+ }"
+ );
+
+ let a = mk_object(&[
+ ("a".to_string(), Boolean(true)),
+ ("b".to_string(), Array(vec![
+ mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+ mk_object(&[("d".to_string(), String("".to_string()))])
+ ]))
+ ]);
+
+ // We can't compare the strings directly because the object fields may be
+ // printed in a different order.
+ assert_eq!(a.clone(), a.to_string().parse().unwrap());
+ assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap());
+}
+
+#[test]
+fn test_write_enum() {
+ let animal = Dog;
+ assert_eq!(
+ json::as_json(&animal).to_string(),
+ "\"Dog\""
+ );
+ assert_eq!(
+ json::as_pretty_json(&animal).to_string(),
+ "\"Dog\""
+ );
+
+ let animal = Frog("Henry".to_string(), 349);
+ assert_eq!(
+ json::as_json(&animal).to_string(),
+ "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
+ );
+ assert_eq!(
+ json::as_pretty_json(&animal).to_string(),
+ "{\n \
+ \"variant\": \"Frog\",\n \
+ \"fields\": [\n \
+ \"Henry\",\n \
+ 349\n \
+ ]\n\
+ }"
+ );
+}
+
+macro_rules! check_encoder_for_simple {
+ ($value:expr, $expected:expr) => ({
+ let s = json::as_json(&$value).to_string();
+ assert_eq!(s, $expected);
+
+ let s = json::as_pretty_json(&$value).to_string();
+ assert_eq!(s, $expected);
+ })
+}
+
+#[test]
+fn test_write_some() {
+ check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\"");
+}
+
+#[test]
+fn test_write_none() {
+ check_encoder_for_simple!(None::<string::String>, "null");
+}
+
+#[test]
+fn test_write_char() {
+ check_encoder_for_simple!('a', "\"a\"");
+ check_encoder_for_simple!('\t', "\"\\t\"");
+ check_encoder_for_simple!('\u{0000}', "\"\\u0000\"");
+ check_encoder_for_simple!('\u{001b}', "\"\\u001b\"");
+ check_encoder_for_simple!('\u{007f}', "\"\\u007f\"");
+ check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
+ check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
+ check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
+}
+
+#[test]
+fn test_trailing_characters() {
+ assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5)));
+ assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5)));
+ assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
+ assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2)));
+ assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
+ assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
+}
+
+#[test]
+fn test_read_identifiers() {
+ assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3)));
+
+ assert_eq!(from_str("null"), Ok(Null));
+ assert_eq!(from_str("true"), Ok(Boolean(true)));
+ assert_eq!(from_str("false"), Ok(Boolean(false)));
+ assert_eq!(from_str(" null "), Ok(Null));
+ assert_eq!(from_str(" true "), Ok(Boolean(true)));
+ assert_eq!(from_str(" false "), Ok(Boolean(false)));
+}
+
+#[test]
+fn test_decode_identifiers() {
+ let v: () = json::decode("null").unwrap();
+ assert_eq!(v, ());
+
+ let v: bool = json::decode("true").unwrap();
+ assert_eq!(v, true);
+
+ let v: bool = json::decode("false").unwrap();
+ assert_eq!(v, false);
+}
+
+#[test]
+fn test_read_number() {
+ assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1)));
+ assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1)));
+ assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
+ assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2)));
+ assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2)));
+ assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3)));
+ assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3)));
+ assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
+
+ assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
+ assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
+
+ assert_eq!(from_str("3"), Ok(U64(3)));
+ assert_eq!(from_str("3.1"), Ok(F64(3.1)));
+ assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
+ assert_eq!(from_str("0.4"), Ok(F64(0.4)));
+ assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
+ assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
+ assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
+ assert_eq!(from_str(" 3 "), Ok(U64(3)));
+
+ assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
+ assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
+ assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
+}
+
+#[test]
+fn test_decode_numbers() {
+ let v: f64 = json::decode("3").unwrap();
+ assert_eq!(v, 3.0);
+
+ let v: f64 = json::decode("3.1").unwrap();
+ assert_eq!(v, 3.1);
+
+ let v: f64 = json::decode("-1.2").unwrap();
+ assert_eq!(v, -1.2);
+
+ let v: f64 = json::decode("0.4").unwrap();
+ assert_eq!(v, 0.4);
+
+ let v: f64 = json::decode("0.4e5").unwrap();
+ assert_eq!(v, 0.4e5);
+
+ let v: f64 = json::decode("0.4e15").unwrap();
+ assert_eq!(v, 0.4e15);
+
+ let v: f64 = json::decode("0.4e-01").unwrap();
+ assert_eq!(v, 0.4e-01);
+
+ let v: u64 = json::decode("0").unwrap();
+ assert_eq!(v, 0);
+
+ let v: u64 = json::decode("18446744073709551615").unwrap();
+ assert_eq!(v, u64::MAX);
+
+ let v: i64 = json::decode("-9223372036854775808").unwrap();
+ assert_eq!(v, i64::MIN);
+
+ let v: i64 = json::decode("9223372036854775807").unwrap();
+ assert_eq!(v, i64::MAX);
+
+ let res: DecodeResult<i64> = json::decode("765.25");
+ assert_eq!(res, Err(ExpectedError("Integer".to_string(),
+ "765.25".to_string())));
+}
+
+#[test]
+fn test_read_str() {
+ assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
+ assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
+
+ assert_eq!(from_str("\"\""), Ok(String("".to_string())));
+ assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
+ assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
+ assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
+ assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
+ assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
+ assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
+ assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
+ assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
+ assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
+}
+
+#[test]
+fn test_decode_str() {
+ let s = [("\"\"", ""),
+ ("\"foo\"", "foo"),
+ ("\"\\\"\"", "\""),
+ ("\"\\b\"", "\x08"),
+ ("\"\\n\"", "\n"),
+ ("\"\\r\"", "\r"),
+ ("\"\\t\"", "\t"),
+ ("\"\\u12ab\"", "\u{12ab}"),
+ ("\"\\uAB12\"", "\u{AB12}")];
+
+ for &(i, o) in &s {
+ let v: string::String = json::decode(i).unwrap();
+ assert_eq!(v, o);
+ }
+}
+
+#[test]
+fn test_read_array() {
+ assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
+ assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
+ assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
+ assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+
+ assert_eq!(from_str("[]"), Ok(Array(vec![])));
+ assert_eq!(from_str("[ ]"), Ok(Array(vec![])));
+ assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)])));
+ assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)])));
+ assert_eq!(from_str("[null]"), Ok(Array(vec![Null])));
+ assert_eq!(from_str("[3, 1]"),
+ Ok(Array(vec![U64(3), U64(1)])));
+ assert_eq!(from_str("\n[3, 2]\n"),
+ Ok(Array(vec![U64(3), U64(2)])));
+ assert_eq!(from_str("[2, [4, 1]]"),
+ Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
+}
+
+#[test]
+fn test_decode_array() {
+ let v: Vec<()> = json::decode("[]").unwrap();
+ assert_eq!(v, []);
+
+ let v: Vec<()> = json::decode("[null]").unwrap();
+ assert_eq!(v, [()]);
+
+ let v: Vec<bool> = json::decode("[true]").unwrap();
+ assert_eq!(v, [true]);
+
+ let v: Vec<isize> = json::decode("[3, 1]").unwrap();
+ assert_eq!(v, [3, 1]);
+
+ let v: Vec<Vec<usize>> = json::decode("[[3], [1, 2]]").unwrap();
+ assert_eq!(v, [vec![3], vec![1, 2]]);
+}
+
+#[test]
+fn test_decode_tuple() {
+ let t: (usize, usize, usize) = json::decode("[1, 2, 3]").unwrap();
+ assert_eq!(t, (1, 2, 3));
+
+ let t: (usize, string::String) = json::decode("[1, \"two\"]").unwrap();
+ assert_eq!(t, (1, "two".to_string()));
+}
+
+#[test]
+fn test_decode_tuple_malformed_types() {
+ assert!(json::decode::<(usize, string::String)>("[1, 2]").is_err());
+}
+
+#[test]
+fn test_decode_tuple_malformed_length() {
+ assert!(json::decode::<(usize, usize)>("[1, 2, 3]").is_err());
+}
+
+#[test]
+fn test_read_object() {
+ assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
+ assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
+ assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2)));
+ assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
+ assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
+ assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
+
+ assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6)));
+ assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6)));
+ assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
+ assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8)));
+ assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8)));
+
+ assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
+ assert_eq!(from_str("{\"a\": 3}").unwrap(),
+ mk_object(&[("a".to_string(), U64(3))]));
+
+ assert_eq!(from_str(
+ "{ \"a\": null, \"b\" : true }").unwrap(),
+ mk_object(&[
+ ("a".to_string(), Null),
+ ("b".to_string(), Boolean(true))]));
+ assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
+ mk_object(&[
+ ("a".to_string(), Null),
+ ("b".to_string(), Boolean(true))]));
+ assert_eq!(from_str(
+ "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
+ mk_object(&[
+ ("a".to_string(), F64(1.0)),
+ ("b".to_string(), Array(vec![Boolean(true)]))
+ ]));
+ assert_eq!(from_str(
+ "{\
+ \"a\": 1.0, \
+ \"b\": [\
+ true,\
+ \"foo\\nbar\", \
+ { \"c\": {\"d\": null} } \
+ ]\
+ }").unwrap(),
+ mk_object(&[
+ ("a".to_string(), F64(1.0)),
+ ("b".to_string(), Array(vec![
+ Boolean(true),
+ String("foo\nbar".to_string()),
+ mk_object(&[
+ ("c".to_string(), mk_object(&[("d".to_string(), Null)]))
+ ])
+ ]))
+ ]));
+}
+
+#[test]
+fn test_decode_struct() {
+ let s = "{
+ \"inner\": [
+ { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] }
+ ]
+ }";
+
+ let v: Outer = json::decode(s).unwrap();
+ assert_eq!(
+ v,
+ Outer {
+ inner: vec![
+ Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }
+ ]
+ }
+ );
+}
+
+#[derive(RustcDecodable)]
+struct FloatStruct {
+ f: f64,
+ a: Vec<f64>
+}
+#[test]
+fn test_decode_struct_with_nan() {
+ let s = "{\"f\":null,\"a\":[null,123]}";
+ let obj: FloatStruct = json::decode(s).unwrap();
+ assert!(obj.f.is_nan());
+ assert!(obj.a[0].is_nan());
+ assert_eq!(obj.a[1], 123f64);
+}
+
+#[test]
+fn test_decode_option() {
+ let value: Option<string::String> = json::decode("null").unwrap();
+ assert_eq!(value, None);
+
+ let value: Option<string::String> = json::decode("\"jodhpurs\"").unwrap();
+ assert_eq!(value, Some("jodhpurs".to_string()));
+}
+
+#[test]
+fn test_decode_enum() {
+ let value: Animal = json::decode("\"Dog\"").unwrap();
+ assert_eq!(value, Dog);
+
+ let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
+ let value: Animal = json::decode(s).unwrap();
+ assert_eq!(value, Frog("Henry".to_string(), 349));
+}
+
+#[test]
+fn test_decode_map() {
+ let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\
+ \"fields\":[\"Henry\", 349]}}";
+ let mut map: BTreeMap<string::String, Animal> = json::decode(s).unwrap();
+
+ assert_eq!(map.remove(&"a".to_string()), Some(Dog));
+ assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
+}
+
+#[test]
+fn test_multiline_errors() {
+ assert_eq!(from_str("{\n \"foo\":\n \"bar\""),
+ Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
+}
+
+#[derive(RustcDecodable)]
+#[allow(dead_code)]
+struct DecodeStruct {
+ x: f64,
+ y: bool,
+ z: string::String,
+ w: Vec<DecodeStruct>
+}
+#[derive(RustcDecodable)]
+enum DecodeEnum {
+ A(f64),
+ B(string::String)
+}
+fn check_err<T: Decodable>(to_parse: &'static str, expected: DecoderError) {
+ let res: DecodeResult<T> = match from_str(to_parse) {
+ Err(e) => Err(ParseError(e)),
+ Ok(json) => Decodable::decode(&mut Decoder::new(json))
+ };
+ match res {
+ Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`",
+ to_parse, expected),
+ Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}",
+ to_parse, e),
+ Err(e) => {
+ assert_eq!(e, expected);
+ }
+ }
+}
+#[test]
+fn test_decode_errors_struct() {
+ check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string()));
+ check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
+ ExpectedError("Number".to_string(), "true".to_string()));
+ check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
+ ExpectedError("Boolean".to_string(), "[]".to_string()));
+ check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
+ ExpectedError("String".to_string(), "{}".to_string()));
+ check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
+ ExpectedError("Array".to_string(), "null".to_string()));
+ check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}",
+ MissingFieldError("w".to_string()));
+}
+#[test]
+fn test_decode_errors_enum() {
+ check_err::<DecodeEnum>("{}",
+ MissingFieldError("variant".to_string()));
+ check_err::<DecodeEnum>("{\"variant\": 1}",
+ ExpectedError("String".to_string(), "1".to_string()));
+ check_err::<DecodeEnum>("{\"variant\": \"A\"}",
+ MissingFieldError("fields".to_string()));
+ check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}",
+ ExpectedError("Array".to_string(), "null".to_string()));
+ check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}",
+ UnknownVariantError("C".to_string()));
+}
+
+#[test]
+fn test_find(){
+ let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
+ let found_str = json_value.find("dog");
+ assert!(found_str.unwrap().as_string().unwrap() == "cat");
+}
+
+#[test]
+fn test_find_path(){
+ let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
+ let found_str = json_value.find_path(&["dog", "cat", "mouse"]);
+ assert!(found_str.unwrap().as_string().unwrap() == "cheese");
+}
+
+#[test]
+fn test_search(){
+ let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
+ let found_str = json_value.search("mouse").and_then(|j| j.as_string());
+ assert!(found_str.unwrap() == "cheese");
+}
+
+#[test]
+fn test_index(){
+ let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap();
+ let ref array = json_value["animals"];
+ assert_eq!(array[0].as_string().unwrap(), "dog");
+ assert_eq!(array[1].as_string().unwrap(), "cat");
+ assert_eq!(array[2].as_string().unwrap(), "mouse");
+}
+
+#[test]
+fn test_is_object(){
+ let json_value = from_str("{}").unwrap();
+ assert!(json_value.is_object());
+}
+
+#[test]
+fn test_as_object(){
+ let json_value = from_str("{}").unwrap();
+ let json_object = json_value.as_object();
+ assert!(json_object.is_some());
+}
+
+#[test]
+fn test_is_array(){
+ let json_value = from_str("[1, 2, 3]").unwrap();
+ assert!(json_value.is_array());
+}
+
+#[test]
+fn test_as_array(){
+ let json_value = from_str("[1, 2, 3]").unwrap();
+ let json_array = json_value.as_array();
+ let expected_length = 3;
+ assert!(json_array.is_some() && json_array.unwrap().len() == expected_length);
+}
+
+#[test]
+fn test_is_string(){
+ let json_value = from_str("\"dog\"").unwrap();
+ assert!(json_value.is_string());
+}
+
+#[test]
+fn test_as_string(){
+ let json_value = from_str("\"dog\"").unwrap();
+ let json_str = json_value.as_string();
+ let expected_str = "dog";
+ assert_eq!(json_str, Some(expected_str));
+}
+
+#[test]
+fn test_is_number(){
+ let json_value = from_str("12").unwrap();
+ assert!(json_value.is_number());
+}
+
+#[test]
+fn test_is_i64(){
+ let json_value = from_str("-12").unwrap();
+ assert!(json_value.is_i64());
+
+ let json_value = from_str("12").unwrap();
+ assert!(!json_value.is_i64());
+
+ let json_value = from_str("12.0").unwrap();
+ assert!(!json_value.is_i64());
+}
+
+#[test]
+fn test_is_u64(){
+ let json_value = from_str("12").unwrap();
+ assert!(json_value.is_u64());
+
+ let json_value = from_str("-12").unwrap();
+ assert!(!json_value.is_u64());
+
+ let json_value = from_str("12.0").unwrap();
+ assert!(!json_value.is_u64());
+}
+
+#[test]
+fn test_is_f64(){
+ let json_value = from_str("12").unwrap();
+ assert!(!json_value.is_f64());
+
+ let json_value = from_str("-12").unwrap();
+ assert!(!json_value.is_f64());
+
+ let json_value = from_str("12.0").unwrap();
+ assert!(json_value.is_f64());
+
+ let json_value = from_str("-12.0").unwrap();
+ assert!(json_value.is_f64());
+}
+
+#[test]
+fn test_as_i64(){
+ let json_value = from_str("-12").unwrap();
+ let json_num = json_value.as_i64();
+ assert_eq!(json_num, Some(-12));
+}
+
+#[test]
+fn test_as_u64(){
+ let json_value = from_str("12").unwrap();
+ let json_num = json_value.as_u64();
+ assert_eq!(json_num, Some(12));
+}
+
+#[test]
+fn test_as_f64(){
+ let json_value = from_str("12.0").unwrap();
+ let json_num = json_value.as_f64();
+ assert_eq!(json_num, Some(12f64));
+}
+
+#[test]
+fn test_is_boolean(){
+ let json_value = from_str("false").unwrap();
+ assert!(json_value.is_boolean());
+}
+
+#[test]
+fn test_as_boolean(){
+ let json_value = from_str("false").unwrap();
+ let json_bool = json_value.as_boolean();
+ let expected_bool = false;
+ assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool);
+}
+
+#[test]
+fn test_is_null(){
+ let json_value = from_str("null").unwrap();
+ assert!(json_value.is_null());
+}
+
+#[test]
+fn test_as_null(){
+ let json_value = from_str("null").unwrap();
+ let json_null = json_value.as_null();
+ let expected_null = ();
+ assert!(json_null.is_some() && json_null.unwrap() == expected_null);
+}
+
+#[test]
+fn test_encode_hashmap_with_numeric_key() {
+ use std::str::from_utf8;
+ use std::collections::HashMap;
+ let mut hm: HashMap<usize, bool> = HashMap::new();
+ hm.insert(1, true);
+ let mut mem_buf = Vec::new();
+ write!(&mut mem_buf, "{}", json::as_pretty_json(&hm)).unwrap();
+ let json_str = from_utf8(&mem_buf[..]).unwrap();
+ match from_str(json_str) {
+ Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
+ _ => {} // it parsed and we are good to go
+ }
+}
+
+#[test]
+fn test_prettyencode_hashmap_with_numeric_key() {
+ use std::str::from_utf8;
+ use std::collections::HashMap;
+ let mut hm: HashMap<usize, bool> = HashMap::new();
+ hm.insert(1, true);
+ let mut mem_buf = Vec::new();
+ write!(&mut mem_buf, "{}", json::as_pretty_json(&hm)).unwrap();
+ let json_str = from_utf8(&mem_buf[..]).unwrap();
+ match from_str(json_str) {
+ Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
+ _ => {} // it parsed and we are good to go
+ }
+}
+
+#[test]
+fn test_prettyencoder_indent_level_param() {
+ use std::str::from_utf8;
+ use std::collections::BTreeMap;
+
+ let mut tree = BTreeMap::new();
+
+ tree.insert("hello".to_string(), String("guten tag".to_string()));
+ tree.insert("goodbye".to_string(), String("sayonara".to_string()));
+
+ let json = Array(
+ // The following layout below should look a lot like
+ // the pretty-printed JSON (indent * x)
+ vec!
+ ( // 0x
+ String("greetings".to_string()), // 1x
+ Object(tree), // 1x + 2x + 2x + 1x
+ ) // 0x
+ // End JSON array (7 lines)
+ );
+
+ // Helper function for counting indents
+ fn indents(source: &str) -> usize {
+ let trimmed = source.trim_start_matches(' ');
+ source.len() - trimmed.len()
+ }
+
+ // Test up to 4 spaces of indents (more?)
+ for i in 0..4 {
+ let mut writer = Vec::new();
+ write!(&mut writer, "{}",
+ json::as_pretty_json(&json).indent(i)).unwrap();
+
+ let printed = from_utf8(&writer[..]).unwrap();
+
+ // Check for indents at each line
+ let lines: Vec<&str> = printed.lines().collect();
+ assert_eq!(lines.len(), 7); // JSON should be 7 lines
+
+ assert_eq!(indents(lines[0]), 0 * i); // [
+ assert_eq!(indents(lines[1]), 1 * i); // "greetings",
+ assert_eq!(indents(lines[2]), 1 * i); // {
+ assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag",
+ assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara"
+ assert_eq!(indents(lines[5]), 1 * i); // },
+ assert_eq!(indents(lines[6]), 0 * i); // ]
+
+ // Finally, test that the pretty-printed JSON is valid
+ from_str(printed).ok().expect("Pretty-printed JSON is invalid!");
+ }
+}
+
+#[test]
+fn test_hashmap_with_enum_key() {
+ use std::collections::HashMap;
+ #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)]
+ enum Enum {
+ Foo,
+ #[allow(dead_code)]
+ Bar,
+ }
+ let mut map = HashMap::new();
+ map.insert(Enum::Foo, 0);
+ let result = json::encode(&map).unwrap();
+ assert_eq!(&result[..], r#"{"Foo":0}"#);
+ let decoded: HashMap<Enum, _> = json::decode(&result).unwrap();
+ assert_eq!(map, decoded);
+}
+
+#[test]
+fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
+ use std::collections::HashMap;
+ let json_str = "{\"1\":true}";
+ let json_obj = match from_str(json_str) {
+ Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
+ Ok(o) => o
+ };
+ let mut decoder = Decoder::new(json_obj);
+ let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder).unwrap();
+}
+
+#[test]
+fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
+ use std::collections::HashMap;
+ let json_str = "{\"a\":true}";
+ let json_obj = match from_str(json_str) {
+ Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
+ Ok(o) => o
+ };
+ let mut decoder = Decoder::new(json_obj);
+ let result: Result<HashMap<usize, bool>, DecoderError> = Decodable::decode(&mut decoder);
+ assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
+}
+
+fn assert_stream_equal(src: &str,
+ expected: Vec<(JsonEvent, Vec<StackElement<'_>>)>) {
+ let mut parser = Parser::new(src.chars());
+ let mut i = 0;
+ loop {
+ let evt = match parser.next() {
+ Some(e) => e,
+ None => { break; }
+ };
+ let (ref expected_evt, ref expected_stack) = expected[i];
+ if !parser.stack().is_equal_to(expected_stack) {
+ panic!("Parser stack is not equal to {:?}", expected_stack);
+ }
+ assert_eq!(&evt, expected_evt);
+ i+=1;
+ }
+}
+#[test]
+fn test_streaming_parser() {
+ assert_stream_equal(
+ r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
+ vec![
+ (ObjectStart, vec![]),
+ (StringValue("bar".to_string()), vec![StackElement::Key("foo")]),
+ (ArrayStart, vec![StackElement::Key("array")]),
+ (U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]),
+ (U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]),
+ (U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]),
+ (U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]),
+ (U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]),
+ (U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]),
+ (ArrayEnd, vec![StackElement::Key("array")]),
+ (ArrayStart, vec![StackElement::Key("idents")]),
+ (NullValue, vec![StackElement::Key("idents"),
+ StackElement::Index(0)]),
+ (BooleanValue(true), vec![StackElement::Key("idents"),
+ StackElement::Index(1)]),
+ (BooleanValue(false), vec![StackElement::Key("idents"),
+ StackElement::Index(2)]),
+ (ArrayEnd, vec![StackElement::Key("idents")]),
+ (ObjectEnd, vec![]),
+ ]
+ );
+}
+fn last_event(src: &str) -> JsonEvent {
+ let mut parser = Parser::new(src.chars());
+ let mut evt = NullValue;
+ loop {
+ evt = match parser.next() {
+ Some(e) => e,
+ None => return evt,
+ }
+ }
+}
+
+#[test]
+fn test_read_object_streaming() {
+ assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
+ assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2)));
+ assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
+ assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
+ assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
+
+ assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6)));
+ assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6)));
+ assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
+ assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8)));
+ assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
+ assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8)));
+
+ assert_stream_equal(
+ "{}",
+ vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]
+ );
+ assert_stream_equal(
+ "{\"a\": 3}",
+ vec![
+ (ObjectStart, vec![]),
+ (U64Value(3), vec![StackElement::Key("a")]),
+ (ObjectEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "{ \"a\": null, \"b\" : true }",
+ vec![
+ (ObjectStart, vec![]),
+ (NullValue, vec![StackElement::Key("a")]),
+ (BooleanValue(true), vec![StackElement::Key("b")]),
+ (ObjectEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "{\"a\" : 1.0 ,\"b\": [ true ]}",
+ vec![
+ (ObjectStart, vec![]),
+ (F64Value(1.0), vec![StackElement::Key("a")]),
+ (ArrayStart, vec![StackElement::Key("b")]),
+ (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]),
+ (ArrayEnd, vec![StackElement::Key("b")]),
+ (ObjectEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ r#"{
+ "a": 1.0,
+ "b": [
+ true,
+ "foo\nbar",
+ { "c": {"d": null} }
+ ]
+ }"#,
+ vec![
+ (ObjectStart, vec![]),
+ (F64Value(1.0), vec![StackElement::Key("a")]),
+ (ArrayStart, vec![StackElement::Key("b")]),
+ (BooleanValue(true), vec![StackElement::Key("b"),
+ StackElement::Index(0)]),
+ (StringValue("foo\nbar".to_string()), vec![StackElement::Key("b"),
+ StackElement::Index(1)]),
+ (ObjectStart, vec![StackElement::Key("b"),
+ StackElement::Index(2)]),
+ (ObjectStart, vec![StackElement::Key("b"),
+ StackElement::Index(2),
+ StackElement::Key("c")]),
+ (NullValue, vec![StackElement::Key("b"),
+ StackElement::Index(2),
+ StackElement::Key("c"),
+ StackElement::Key("d")]),
+ (ObjectEnd, vec![StackElement::Key("b"),
+ StackElement::Index(2),
+ StackElement::Key("c")]),
+ (ObjectEnd, vec![StackElement::Key("b"),
+ StackElement::Index(2)]),
+ (ArrayEnd, vec![StackElement::Key("b")]),
+ (ObjectEnd, vec![]),
+ ]
+ );
+}
+#[test]
+fn test_read_array_streaming() {
+ assert_stream_equal(
+ "[]",
+ vec![
+ (ArrayStart, vec![]),
+ (ArrayEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "[ ]",
+ vec![
+ (ArrayStart, vec![]),
+ (ArrayEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "[true]",
+ vec![
+ (ArrayStart, vec![]),
+ (BooleanValue(true), vec![StackElement::Index(0)]),
+ (ArrayEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "[ false ]",
+ vec![
+ (ArrayStart, vec![]),
+ (BooleanValue(false), vec![StackElement::Index(0)]),
+ (ArrayEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "[null]",
+ vec![
+ (ArrayStart, vec![]),
+ (NullValue, vec![StackElement::Index(0)]),
+ (ArrayEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "[3, 1]",
+ vec![
+ (ArrayStart, vec![]),
+ (U64Value(3), vec![StackElement::Index(0)]),
+ (U64Value(1), vec![StackElement::Index(1)]),
+ (ArrayEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "\n[3, 2]\n",
+ vec![
+ (ArrayStart, vec![]),
+ (U64Value(3), vec![StackElement::Index(0)]),
+ (U64Value(2), vec![StackElement::Index(1)]),
+ (ArrayEnd, vec![]),
+ ]
+ );
+ assert_stream_equal(
+ "[2, [4, 1]]",
+ vec![
+ (ArrayStart, vec![]),
+ (U64Value(2), vec![StackElement::Index(0)]),
+ (ArrayStart, vec![StackElement::Index(1)]),
+ (U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]),
+ (U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]),
+ (ArrayEnd, vec![StackElement::Index(1)]),
+ (ArrayEnd, vec![]),
+ ]
+ );
+
+ assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2)));
+
+ assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
+ assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
+ assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
+ assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+
+}
+#[test]
+fn test_trailing_characters_streaming() {
+ assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5)));
+ assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5)));
+ assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6)));
+ assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2)));
+ assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
+ assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
+}
+#[test]
+fn test_read_identifiers_streaming() {
+ assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
+ assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
+ assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));
+
+ assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3)));
+}
+
+#[test]
+fn test_to_json() {
+ use std::collections::{HashMap,BTreeMap};
+ use json::ToJson;
+
+ let array2 = Array(vec![U64(1), U64(2)]);
+ let array3 = Array(vec![U64(1), U64(2), U64(3)]);
+ let object = {
+ let mut tree_map = BTreeMap::new();
+ tree_map.insert("a".to_string(), U64(1));
+ tree_map.insert("b".to_string(), U64(2));
+ Object(tree_map)
+ };
+
+ assert_eq!(array2.to_json(), array2);
+ assert_eq!(object.to_json(), object);
+ assert_eq!(3_isize.to_json(), I64(3));
+ assert_eq!(4_i8.to_json(), I64(4));
+ assert_eq!(5_i16.to_json(), I64(5));
+ assert_eq!(6_i32.to_json(), I64(6));
+ assert_eq!(7_i64.to_json(), I64(7));
+ assert_eq!(8_usize.to_json(), U64(8));
+ assert_eq!(9_u8.to_json(), U64(9));
+ assert_eq!(10_u16.to_json(), U64(10));
+ assert_eq!(11_u32.to_json(), U64(11));
+ assert_eq!(12_u64.to_json(), U64(12));
+ assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
+ assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
+ assert_eq!(().to_json(), Null);
+ assert_eq!(f32::INFINITY.to_json(), Null);
+ assert_eq!(f64::NAN.to_json(), Null);
+ assert_eq!(true.to_json(), Boolean(true));
+ assert_eq!(false.to_json(), Boolean(false));
+ assert_eq!("abc".to_json(), String("abc".to_string()));
+ assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
+ assert_eq!((1_usize, 2_usize).to_json(), array2);
+ assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3);
+ assert_eq!([1_usize, 2_usize].to_json(), array2);
+ assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3);
+ assert_eq!((vec![1_usize, 2_usize]).to_json(), array2);
+ assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3);
+ let mut tree_map = BTreeMap::new();
+ tree_map.insert("a".to_string(), 1 as usize);
+ tree_map.insert("b".to_string(), 2);
+ assert_eq!(tree_map.to_json(), object);
+ let mut hash_map = HashMap::new();
+ hash_map.insert("a".to_string(), 1 as usize);
+ hash_map.insert("b".to_string(), 2);
+ assert_eq!(hash_map.to_json(), object);
+ assert_eq!(Some(15).to_json(), I64(15));
+ assert_eq!(Some(15 as usize).to_json(), U64(15));
+ assert_eq!(None::<isize>.to_json(), Null);
+}
+
+#[test]
+fn test_encode_hashmap_with_arbitrary_key() {
+ use std::collections::HashMap;
+ #[derive(PartialEq, Eq, Hash, RustcEncodable)]
+ struct ArbitraryType(usize);
+ let mut hm: HashMap<ArbitraryType, bool> = HashMap::new();
+ hm.insert(ArbitraryType(1), true);
+ let mut mem_buf = string::String::new();
+ let mut encoder = Encoder::new(&mut mem_buf);
+ let result = hm.encode(&mut encoder);
+ match result.unwrap_err() {
+ EncoderError::BadHashmapKey => (),
+ _ => panic!("expected bad hash map key")
+ }
+}
diff --git a/src/libserialize/tests/opaque.rs b/src/libserialize/tests/opaque.rs
new file mode 100644
index 0000000..fff6fc6
--- /dev/null
+++ b/src/libserialize/tests/opaque.rs
@@ -0,0 +1,282 @@
+extern crate serialize as rustc_serialize;
+
+use rustc_serialize::{Encodable, Decodable};
+use rustc_serialize::opaque::{Encoder, Decoder};
+use std::fmt::Debug;
+
+#[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
+struct Struct {
+ a: (),
+ b: u8,
+ c: u16,
+ d: u32,
+ e: u64,
+ f: usize,
+
+ g: i8,
+ h: i16,
+ i: i32,
+ j: i64,
+ k: isize,
+
+ l: char,
+ m: String,
+ n: f32,
+ o: f64,
+ p: bool,
+ q: Option<u32>,
+}
+
+
+fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
+ let mut encoder = Encoder::new(Vec::new());
+
+ for value in &values {
+ Encodable::encode(&value, &mut encoder).unwrap();
+ }
+
+ let data = encoder.into_inner();
+ let mut decoder = Decoder::new(&data[..], 0);
+
+ for value in values {
+ let decoded = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(value, decoded);
+ }
+}
+
+#[test]
+fn test_unit() {
+ check_round_trip(vec![(), (), (), ()]);
+}
+
+#[test]
+fn test_u8() {
+ let mut vec = vec![];
+ for i in ::std::u8::MIN..::std::u8::MAX {
+ vec.push(i);
+ }
+ check_round_trip(vec);
+}
+
+#[test]
+fn test_u16() {
+ for i in ::std::u16::MIN..::std::u16::MAX {
+ check_round_trip(vec![1, 2, 3, i, i, i]);
+ }
+}
+
+#[test]
+fn test_u32() {
+ check_round_trip(vec![1, 2, 3, ::std::u32::MIN, 0, 1, ::std::u32::MAX, 2, 1]);
+}
+
+#[test]
+fn test_u64() {
+ check_round_trip(vec![1, 2, 3, ::std::u64::MIN, 0, 1, ::std::u64::MAX, 2, 1]);
+}
+
+#[test]
+fn test_usize() {
+ check_round_trip(vec![1, 2, 3, ::std::usize::MIN, 0, 1, ::std::usize::MAX, 2, 1]);
+}
+
+#[test]
+fn test_i8() {
+ let mut vec = vec![];
+ for i in ::std::i8::MIN..::std::i8::MAX {
+ vec.push(i);
+ }
+ check_round_trip(vec);
+}
+
+#[test]
+fn test_i16() {
+ for i in ::std::i16::MIN..::std::i16::MAX {
+ check_round_trip(vec![-1, 2, -3, i, i, i, 2]);
+ }
+}
+
+#[test]
+fn test_i32() {
+ check_round_trip(vec![-1, 2, -3, ::std::i32::MIN, 0, 1, ::std::i32::MAX, 2, 1]);
+}
+
+#[test]
+fn test_i64() {
+ check_round_trip(vec![-1, 2, -3, ::std::i64::MIN, 0, 1, ::std::i64::MAX, 2, 1]);
+}
+
+#[test]
+fn test_isize() {
+ check_round_trip(vec![-1, 2, -3, ::std::isize::MIN, 0, 1, ::std::isize::MAX, 2, 1]);
+}
+
+#[test]
+fn test_bool() {
+ check_round_trip(vec![false, true, true, false, false]);
+}
+
+#[test]
+fn test_f32() {
+ let mut vec = vec![];
+ for i in -100..100 {
+ vec.push((i as f32) / 3.0);
+ }
+ check_round_trip(vec);
+}
+
+#[test]
+fn test_f64() {
+ let mut vec = vec![];
+ for i in -100..100 {
+ vec.push((i as f64) / 3.0);
+ }
+ check_round_trip(vec);
+}
+
+#[test]
+fn test_char() {
+ let vec = vec!['a', 'b', 'c', 'd', 'A', 'X', ' ', '#', 'Ö', 'Ä', 'µ', '€'];
+ check_round_trip(vec);
+}
+
+#[test]
+fn test_string() {
+ let vec = vec!["abcbuÖeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmea€µsbpapmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmeabpnvapeapmaebn".to_string(),
+ "abcbuÖganeiÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmea€µsbpmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmea€µnvapeapmaebn".to_string()];
+
+ check_round_trip(vec);
+}
+
+#[test]
+fn test_option() {
+ check_round_trip(vec![Some(-1i8)]);
+ check_round_trip(vec![Some(-2i16)]);
+ check_round_trip(vec![Some(-3i32)]);
+ check_round_trip(vec![Some(-4i64)]);
+ check_round_trip(vec![Some(-5isize)]);
+
+ let none_i8: Option<i8> = None;
+ check_round_trip(vec![none_i8]);
+
+ let none_i16: Option<i16> = None;
+ check_round_trip(vec![none_i16]);
+
+ let none_i32: Option<i32> = None;
+ check_round_trip(vec![none_i32]);
+
+ let none_i64: Option<i64> = None;
+ check_round_trip(vec![none_i64]);
+
+ let none_isize: Option<isize> = None;
+ check_round_trip(vec![none_isize]);
+}
+
+#[test]
+fn test_struct() {
+ check_round_trip(vec![Struct {
+ a: (),
+ b: 10,
+ c: 11,
+ d: 12,
+ e: 13,
+ f: 14,
+
+ g: 15,
+ h: 16,
+ i: 17,
+ j: 18,
+ k: 19,
+
+ l: 'x',
+ m: "abc".to_string(),
+ n: 20.5,
+ o: 21.5,
+ p: false,
+ q: None,
+ }]);
+
+ check_round_trip(vec![Struct {
+ a: (),
+ b: 101,
+ c: 111,
+ d: 121,
+ e: 131,
+ f: 141,
+
+ g: -15,
+ h: -16,
+ i: -17,
+ j: -18,
+ k: -19,
+
+ l: 'y',
+ m: "def".to_string(),
+ n: -20.5,
+ o: -21.5,
+ p: true,
+ q: Some(1234567),
+ }]);
+}
+
+#[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
+enum Enum {
+ Variant1,
+ Variant2(usize, f32),
+ Variant3 {
+ a: i32,
+ b: char,
+ c: bool,
+ },
+}
+
+#[test]
+fn test_enum() {
+ check_round_trip(vec![Enum::Variant1,
+ Enum::Variant2(1, 2.5),
+ Enum::Variant3 {
+ a: 3,
+ b: 'b',
+ c: false,
+ },
+ Enum::Variant3 {
+ a: -4,
+ b: 'f',
+ c: true,
+ }]);
+}
+
+#[test]
+fn test_sequence() {
+ let mut vec = vec![];
+ for i in -100i64..100i64 {
+ vec.push(i * 100000);
+ }
+
+ check_round_trip(vec![vec]);
+}
+
+#[test]
+fn test_hash_map() {
+ use std::collections::HashMap;
+ let mut map = HashMap::new();
+ for i in -100i64..100i64 {
+ map.insert(i * 100000, i * 10000);
+ }
+
+ check_round_trip(vec![map]);
+}
+
+#[test]
+fn test_tuples() {
+ check_round_trip(vec![('x', (), false, 0.5f32)]);
+ check_round_trip(vec![(9i8, 10u16, 1.5f64)]);
+ check_round_trip(vec![(-12i16, 11u8, 12usize)]);
+ check_round_trip(vec![(1234567isize, 100000000000000u64, 99999999999999i64)]);
+ check_round_trip(vec![(String::new(), "some string".to_string())]);
+}
diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index 878d06c..c844f9e 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -11,6 +11,7 @@
#![feature(crate_visibility_modifier)]
#![feature(label_break_value)]
+#![feature(nll)]
#![feature(rustc_attrs)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_sort_by_cached_key)]
diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs
index 670d71f..7d7fd03 100644
--- a/src/libsyntax_ext/lib.rs
+++ b/src/libsyntax_ext/lib.rs
@@ -9,6 +9,7 @@
#![feature(proc_macro_internals)]
#![feature(proc_macro_span)]
#![feature(decl_macro)]
+#![feature(nll)]
#![feature(str_escape)]
#![feature(rustc_diagnostic_macros)]
diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs
index 70c45f7..dbb4f8f 100644
--- a/src/libsyntax_pos/lib.rs
+++ b/src/libsyntax_pos/lib.rs
@@ -11,6 +11,7 @@
#![feature(const_fn)]
#![feature(crate_visibility_modifier)]
#![feature(custom_attribute)]
+#![feature(nll)]
#![feature(non_exhaustive)]
#![feature(optin_builtin_traits)]
#![feature(rustc_attrs)]
diff --git a/src/libterm/Cargo.toml b/src/libterm/Cargo.toml
index 8021e81..4eba9a9 100644
--- a/src/libterm/Cargo.toml
+++ b/src/libterm/Cargo.toml
@@ -2,6 +2,7 @@
authors = ["The Rust Project Developers"]
name = "term"
version = "0.0.0"
+edition = "2018"
[lib]
name = "term"
diff --git a/src/libterm/lib.rs b/src/libterm/lib.rs
index 4d31262..caca9fa 100644
--- a/src/libterm/lib.rs
+++ b/src/libterm/lib.rs
@@ -35,20 +35,20 @@
test(attr(deny(warnings))))]
#![deny(missing_docs)]
+#![deny(rust_2018_idioms)]
+
#![cfg_attr(windows, feature(libc))]
// Handle rustfmt skips
#![feature(custom_attribute)]
-#![feature(nll)]
#![allow(unused_attributes)]
use std::io::prelude::*;
+use std::io::{self, Stdout, Stderr};
pub use terminfo::TerminfoTerminal;
#[cfg(windows)]
pub use win::WinConsole;
-use std::io::{self, Stdout, Stderr};
-
pub mod terminfo;
#[cfg(windows)]
diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs
index eaa96df..4c3b0b1 100644
--- a/src/libterm/terminfo/mod.rs
+++ b/src/libterm/terminfo/mod.rs
@@ -5,18 +5,16 @@
use std::error;
use std::fmt;
use std::fs::File;
-use std::io::prelude::*;
-use std::io;
-use std::io::BufReader;
+use std::io::{self, prelude::*, BufReader};
use std::path::Path;
-use Attr;
-use color;
-use Terminal;
-use self::searcher::get_dbpath_for_term;
-use self::parser::compiled::{parse, msys_terminfo};
-use self::parm::{expand, Variables, Param};
+use crate::Attr;
+use crate::color;
+use crate::Terminal;
+use searcher::get_dbpath_for_term;
+use parser::compiled::{parse, msys_terminfo};
+use parm::{expand, Variables, Param};
/// A parsed terminfo database entry.
#[derive(Debug)]
@@ -49,7 +47,7 @@
}
fn cause(&self) -> Option<&dyn error::Error> {
- use self::Error::*;
+ use Error::*;
match *self {
IoError(ref e) => Some(e),
_ => None,
@@ -58,8 +56,8 @@
}
impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- use self::Error::*;
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ use Error::*;
match *self {
TermUnset => Ok(()),
MalformedTerminfo(ref e) => e.fmt(f),
diff --git a/src/libterm/terminfo/parm.rs b/src/libterm/terminfo/parm.rs
index 434dd4a..4e81b8a 100644
--- a/src/libterm/terminfo/parm.rs
+++ b/src/libterm/terminfo/parm.rs
@@ -40,23 +40,27 @@
/// Container for static and dynamic variable arrays
pub struct Variables {
/// Static variables A-Z
- sta: [Param; 26],
+ sta_va: [Param; 26],
/// Dynamic variables a-z
- dyn: [Param; 26],
+ dyn_va: [Param; 26],
}
impl Variables {
/// Return a new zero-initialized Variables
pub fn new() -> Variables {
Variables {
- sta: [Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0)],
- dyn: [Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0)],
+ sta_va: [
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0)
+ ],
+ dyn_va: [
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0)
+ ],
}
}
}
@@ -249,14 +253,14 @@
if cur >= 'A' && cur <= 'Z' {
if let Some(arg) = stack.pop() {
let idx = (cur as u8) - b'A';
- vars.sta[idx as usize] = arg;
+ vars.sta_va[idx as usize] = arg;
} else {
return Err("stack is empty".to_string());
}
} else if cur >= 'a' && cur <= 'z' {
if let Some(arg) = stack.pop() {
let idx = (cur as u8) - b'a';
- vars.dyn[idx as usize] = arg;
+ vars.dyn_va[idx as usize] = arg;
} else {
return Err("stack is empty".to_string());
}
@@ -267,10 +271,10 @@
GetVar => {
if cur >= 'A' && cur <= 'Z' {
let idx = (cur as u8) - b'A';
- stack.push(vars.sta[idx as usize].clone());
+ stack.push(vars.sta_va[idx as usize].clone());
} else if cur >= 'a' && cur <= 'z' {
let idx = (cur as u8) - b'a';
- stack.push(vars.dyn[idx as usize].clone());
+ stack.push(vars.dyn_va[idx as usize].clone());
} else {
return Err("bad variable name in %g".to_string());
}
diff --git a/src/libterm/terminfo/parser/compiled.rs b/src/libterm/terminfo/parser/compiled.rs
index 63d0183..b7b9ce8 100644
--- a/src/libterm/terminfo/parser/compiled.rs
+++ b/src/libterm/terminfo/parser/compiled.rs
@@ -3,14 +3,14 @@
//! ncurses-compatible compiled terminfo format parsing (term(5))
use std::collections::HashMap;
-use std::io::prelude::*;
use std::io;
+use std::io::prelude::*;
use super::super::TermInfo;
// These are the orders ncurses uses in its compiled format (as of 5.9). Not sure if portable.
#[rustfmt_skip]
-pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin",
+pub static boolfnames: &[&str] = &["auto_left_margin", "auto_right_margin",
"no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type",
"hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above",
"memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok",
@@ -23,13 +23,13 @@
"return_does_clr_eol"];
#[rustfmt_skip]
-pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
+pub static boolnames: &[&str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
"gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon",
"nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy",
"xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"];
#[rustfmt_skip]
-pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines",
+pub static numfnames: &[&str] = &[ "columns", "init_tabs", "lines",
"lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal",
"width_status_line", "num_labels", "label_height", "label_width", "max_attributes",
"maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity",
@@ -40,13 +40,13 @@
"new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"];
#[rustfmt_skip]
-pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
+pub static numnames: &[&str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
"vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv",
"spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", "orvi", "cps", "widcs",
"btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"];
#[rustfmt_skip]
-pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return",
+pub static stringfnames: &[&str] = &[ "back_tab", "bell", "carriage_return",
"change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos",
"column_address", "command_character", "cursor_address", "cursor_down", "cursor_home",
"cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right",
@@ -120,7 +120,7 @@
"acs_plus", "memory_lock", "memory_unlock", "box_chars_1"];
#[rustfmt_skip]
-pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
+pub static stringnames: &[&str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
"_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1",
"ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc",
"dim", "smir", "invis", "prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc",
diff --git a/src/libterm/win.rs b/src/libterm/win.rs
index 25b03ba..1051ddf 100644
--- a/src/libterm/win.rs
+++ b/src/libterm/win.rs
@@ -7,9 +7,9 @@
use std::io;
use std::io::prelude::*;
-use Attr;
-use color;
-use Terminal;
+use crate::Attr;
+use crate::color;
+use crate::Terminal;
/// A Terminal implementation which uses the Win32 Console API.
pub struct WinConsole<T> {
diff --git a/src/libunwind/lib.rs b/src/libunwind/lib.rs
index b9a9929..0ccffea 100644
--- a/src/libunwind/lib.rs
+++ b/src/libunwind/lib.rs
@@ -4,6 +4,7 @@
#![deny(rust_2018_idioms)]
#![feature(link_cfg)]
+#![feature(nll)]
#![feature(staged_api)]
#![feature(unwind_attributes)]
#![feature(static_nobundle)]
diff --git a/src/test/ui/bad/bad-lint-cap2.stderr b/src/test/ui/bad/bad-lint-cap2.stderr
index d7ec414..b963872 100644
--- a/src/test/ui/bad/bad-lint-cap2.stderr
+++ b/src/test/ui/bad/bad-lint-cap2.stderr
@@ -2,7 +2,7 @@
--> $DIR/bad-lint-cap2.rs:6:5
|
LL | use std::option; //~ ERROR
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/bad-lint-cap2.rs:4:9
diff --git a/src/test/ui/bad/bad-lint-cap3.stderr b/src/test/ui/bad/bad-lint-cap3.stderr
index 5bf0b08..21ed50b 100644
--- a/src/test/ui/bad/bad-lint-cap3.stderr
+++ b/src/test/ui/bad/bad-lint-cap3.stderr
@@ -2,7 +2,7 @@
--> $DIR/bad-lint-cap3.rs:7:5
|
LL | use std::option; //~ WARN
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/bad-lint-cap3.rs:4:9
diff --git a/src/test/ui/imports/unused.stderr b/src/test/ui/imports/unused.stderr
index b56e930..fa82e97 100644
--- a/src/test/ui/imports/unused.stderr
+++ b/src/test/ui/imports/unused.stderr
@@ -2,7 +2,7 @@
--> $DIR/unused.rs:7:24
|
LL | pub(super) use super::f; //~ ERROR unused
- | ^^^^^^^^
+ | ---------------^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/unused.rs:1:9
diff --git a/src/test/ui/issues/issue-30730.stderr b/src/test/ui/issues/issue-30730.stderr
index 0a90107..3cfadd3 100644
--- a/src/test/ui/issues/issue-30730.stderr
+++ b/src/test/ui/issues/issue-30730.stderr
@@ -2,7 +2,7 @@
--> $DIR/issue-30730.rs:3:5
|
LL | use std::thread;
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/issue-30730.rs:2:9
diff --git a/src/test/ui/lint/lint-directives-on-use-items-issue-10534.stderr b/src/test/ui/lint/lint-directives-on-use-items-issue-10534.stderr
index 170b98a..e588d24 100644
--- a/src/test/ui/lint/lint-directives-on-use-items-issue-10534.stderr
+++ b/src/test/ui/lint/lint-directives-on-use-items-issue-10534.stderr
@@ -2,7 +2,7 @@
--> $DIR/lint-directives-on-use-items-issue-10534.rs:12:9
|
LL | use a::x; //~ ERROR: unused import
- | ^^^^
+ | ----^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/lint-directives-on-use-items-issue-10534.rs:1:9
@@ -14,7 +14,7 @@
--> $DIR/lint-directives-on-use-items-issue-10534.rs:21:9
|
LL | use a::y; //~ ERROR: unused import
- | ^^^^
+ | ----^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/lint-directives-on-use-items-issue-10534.rs:20:12
diff --git a/src/test/ui/lint/lint-unused-imports.rs b/src/test/ui/lint/lint-unused-imports.rs
index 4892524..9c5b206 100644
--- a/src/test/ui/lint/lint-unused-imports.rs
+++ b/src/test/ui/lint/lint-unused-imports.rs
@@ -6,7 +6,7 @@
use std::mem::*; // shouldn't get errors for not using
// everything imported
use std::fmt::{};
-//~^ ERROR unused import: `use std::fmt::{};`
+//~^ ERROR unused import: `std::fmt::{}`
// Should get errors for both 'Some' and 'None'
use std::option::Option::{Some, None};
diff --git a/src/test/ui/lint/lint-unused-imports.stderr b/src/test/ui/lint/lint-unused-imports.stderr
index 214f4a4..7970b02 100644
--- a/src/test/ui/lint/lint-unused-imports.stderr
+++ b/src/test/ui/lint/lint-unused-imports.stderr
@@ -1,8 +1,8 @@
-error: unused import: `use std::fmt::{};`
- --> $DIR/lint-unused-imports.rs:8:1
+error: unused import: `std::fmt::{}`
+ --> $DIR/lint-unused-imports.rs:8:5
|
LL | use std::fmt::{};
- | ^^^^^^^^^^^^^^^^^
+ | ----^^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/lint-unused-imports.rs:1:9
@@ -14,37 +14,39 @@
--> $DIR/lint-unused-imports.rs:12:27
|
LL | use std::option::Option::{Some, None};
- | ^^^^ ^^^^
+ | --------------------------^^^^--^^^^-- help: remove the whole `use` item
error: unused import: `test::A`
--> $DIR/lint-unused-imports.rs:15:5
|
LL | use test::A; //~ ERROR unused import: `test::A`
- | ^^^^^^^
+ | ----^^^^^^^- help: remove the whole `use` item
error: unused import: `bar`
--> $DIR/lint-unused-imports.rs:24:18
|
LL | use test2::{foo, bar}; //~ ERROR unused import: `bar`
- | ^^^
+ | --^^^
+ | |
+ | help: remove the unused import
error: unused import: `foo::Square`
--> $DIR/lint-unused-imports.rs:52:13
|
LL | use foo::Square; //~ ERROR unused import: `foo::Square`
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
error: unused import: `self::g`
--> $DIR/lint-unused-imports.rs:68:9
|
LL | use self::g; //~ ERROR unused import: `self::g`
- | ^^^^^^^
+ | ----^^^^^^^- help: remove the whole `use` item
error: unused import: `test2::foo`
--> $DIR/lint-unused-imports.rs:77:9
|
LL | use test2::foo; //~ ERROR unused import: `test2::foo`
- | ^^^^^^^^^^
+ | ----^^^^^^^^^^- help: remove the whole `use` item
error: unused import: `test::B2`
--> $DIR/lint-unused-imports.rs:20:5
diff --git a/src/test/ui/lint/lints-in-foreign-macros.stderr b/src/test/ui/lint/lints-in-foreign-macros.stderr
index 8287ca5..b808ca7 100644
--- a/src/test/ui/lint/lints-in-foreign-macros.stderr
+++ b/src/test/ui/lint/lints-in-foreign-macros.stderr
@@ -2,7 +2,7 @@
--> $DIR/lints-in-foreign-macros.rs:11:16
|
LL | () => {use std::string::ToString;} //~ WARN: unused import
- | ^^^^^^^^^^^^^^^^^^^^^
+ | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
...
LL | mod a { foo!(); }
| ------- in this macro invocation
@@ -17,13 +17,13 @@
--> $DIR/lints-in-foreign-macros.rs:16:18
|
LL | mod c { baz!(use std::string::ToString;); } //~ WARN: unused import
- | ^^^^^^^^^^^^^^^^^^^^^
+ | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
warning: unused import: `std::string::ToString`
--> $DIR/lints-in-foreign-macros.rs:17:19
|
LL | mod d { baz2!(use std::string::ToString;); } //~ WARN: unused import
- | ^^^^^^^^^^^^^^^^^^^^^
+ | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
warning: missing documentation for crate
--> $DIR/lints-in-foreign-macros.rs:4:1
diff --git a/src/test/ui/nll/issue-58053.rs b/src/test/ui/nll/issue-58053.rs
new file mode 100644
index 0000000..d433890
--- /dev/null
+++ b/src/test/ui/nll/issue-58053.rs
@@ -0,0 +1,14 @@
+#![allow(warnings)]
+#![feature(nll)]
+
+fn main() {
+ let i = &3;
+
+ let f = |x: &i32| -> &i32 { x };
+ //~^ ERROR lifetime may not live long enough
+ let j = f(i);
+
+ let g = |x: &i32| { x };
+ //~^ ERROR lifetime may not live long enough
+ let k = g(i);
+}
diff --git a/src/test/ui/nll/issue-58053.stderr b/src/test/ui/nll/issue-58053.stderr
new file mode 100644
index 0000000..9048983
--- /dev/null
+++ b/src/test/ui/nll/issue-58053.stderr
@@ -0,0 +1,20 @@
+error: lifetime may not live long enough
+ --> $DIR/issue-58053.rs:7:33
+ |
+LL | let f = |x: &i32| -> &i32 { x };
+ | - ---- ^ returning this value requires that `'1` must outlive `'2`
+ | | |
+ | | return type of closure is &'2 i32
+ | let's call the lifetime of this reference `'1`
+
+error: lifetime may not live long enough
+ --> $DIR/issue-58053.rs:11:25
+ |
+LL | let g = |x: &i32| { x };
+ | - - ^ returning this value requires that `'1` must outlive `'2`
+ | | |
+ | | return type of closure is &'2 i32
+ | let's call the lifetime of this reference `'1`
+
+error: aborting due to 2 previous errors
+
diff --git a/src/test/ui/rfc-2166-underscore-imports/basic.stderr b/src/test/ui/rfc-2166-underscore-imports/basic.stderr
index 3080359..c7b36ea 100644
--- a/src/test/ui/rfc-2166-underscore-imports/basic.stderr
+++ b/src/test/ui/rfc-2166-underscore-imports/basic.stderr
@@ -2,7 +2,7 @@
--> $DIR/basic.rs:26:9
|
LL | use m::Tr1 as _; //~ WARN unused import
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/basic.rs:4:9
@@ -14,5 +14,5 @@
--> $DIR/basic.rs:27:9
|
LL | use S as _; //~ WARN unused import
- | ^^^^^^
+ | ----^^^^^^- help: remove the whole `use` item
diff --git a/src/test/ui/rfc-2166-underscore-imports/unused-2018.stderr b/src/test/ui/rfc-2166-underscore-imports/unused-2018.stderr
index 4163c28..0bbc172 100644
--- a/src/test/ui/rfc-2166-underscore-imports/unused-2018.stderr
+++ b/src/test/ui/rfc-2166-underscore-imports/unused-2018.stderr
@@ -2,7 +2,7 @@
--> $DIR/unused-2018.rs:6:9
|
LL | use core::any; //~ ERROR unused import: `core::any`
- | ^^^^^^^^^
+ | ----^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/unused-2018.rs:3:9
@@ -14,7 +14,7 @@
--> $DIR/unused-2018.rs:10:9
|
LL | use core; //~ ERROR unused import: `core`
- | ^^^^
+ | ----^^^^- help: remove the whole `use` item
error: aborting due to 2 previous errors
diff --git a/src/test/ui/span/multispan-import-lint.stderr b/src/test/ui/span/multispan-import-lint.stderr
index a730d08..6bd0e9b 100644
--- a/src/test/ui/span/multispan-import-lint.stderr
+++ b/src/test/ui/span/multispan-import-lint.stderr
@@ -10,4 +10,8 @@
LL | #![warn(unused)]
| ^^^^^^
= note: #[warn(unused_imports)] implied by #[warn(unused)]
+help: remove the unused imports
+ |
+LL | use std::cmp::{min};
+ | -- --
diff --git a/src/test/ui/use/use-nested-groups-unused-imports.rs b/src/test/ui/use/use-nested-groups-unused-imports.rs
index 5bdc7b2..5fe8595 100644
--- a/src/test/ui/use/use-nested-groups-unused-imports.rs
+++ b/src/test/ui/use/use-nested-groups-unused-imports.rs
@@ -18,7 +18,7 @@
use foo::bar::baz::{*, *};
//~^ ERROR unused import: `*`
use foo::{};
- //~^ ERROR unused import: `use foo::{};`
+ //~^ ERROR unused import: `foo::{}`
fn main() {
let _: Bar;
diff --git a/src/test/ui/use/use-nested-groups-unused-imports.stderr b/src/test/ui/use/use-nested-groups-unused-imports.stderr
index f60c7f5..6af6f44 100644
--- a/src/test/ui/use/use-nested-groups-unused-imports.stderr
+++ b/src/test/ui/use/use-nested-groups-unused-imports.stderr
@@ -2,7 +2,7 @@
--> $DIR/use-nested-groups-unused-imports.rs:16:11
|
LL | use foo::{Foo, bar::{baz::{}, foobar::*}, *};
- | ^^^ ^^^^^^^ ^^^^^^^^^ ^
+ | ----------^^^--------^^^^^^^--^^^^^^^^^---^-- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/use-nested-groups-unused-imports.rs:3:9
@@ -14,13 +14,15 @@
--> $DIR/use-nested-groups-unused-imports.rs:18:24
|
LL | use foo::bar::baz::{*, *};
- | ^
+ | --^
+ | |
+ | help: remove the unused import
-error: unused import: `use foo::{};`
- --> $DIR/use-nested-groups-unused-imports.rs:20:1
+error: unused import: `foo::{}`
+ --> $DIR/use-nested-groups-unused-imports.rs:20:5
|
LL | use foo::{};
- | ^^^^^^^^^^^^
+ | ----^^^^^^^- help: remove the whole `use` item
error: aborting due to 3 previous errors
diff --git a/src/tools/rustc-std-workspace-core/Cargo.toml b/src/tools/rustc-std-workspace-core/Cargo.toml
index f000d63..d527ce1 100644
--- a/src/tools/rustc-std-workspace-core/Cargo.toml
+++ b/src/tools/rustc-std-workspace-core/Cargo.toml
@@ -6,6 +6,7 @@
description = """
Hack for the compiler's own build system
"""
+edition = "2018"
[lib]
path = "lib.rs"
diff --git a/src/tools/rustc-std-workspace-core/lib.rs b/src/tools/rustc-std-workspace-core/lib.rs
index e2946fe..99d51bc 100644
--- a/src/tools/rustc-std-workspace-core/lib.rs
+++ b/src/tools/rustc-std-workspace-core/lib.rs
@@ -1,6 +1,5 @@
#![feature(no_core)]
#![no_core]
-
-extern crate core;
+#![deny(rust_2018_idioms)]
pub use core::*;
diff --git a/src/tools/rustc-workspace-hack/Cargo.toml b/src/tools/rustc-workspace-hack/Cargo.toml
index f5eeddd..f943ac9 100644
--- a/src/tools/rustc-workspace-hack/Cargo.toml
+++ b/src/tools/rustc-workspace-hack/Cargo.toml
@@ -6,6 +6,7 @@
description = """
Hack for the compiler's own build system
"""
+edition = "2018"
[lib]
path = "lib.rs"