Clean up ast::Attribute, ast::CrateConfig, and string interning #37824

Merged: 12 commits, Nov 21, 2016
52 changes: 28 additions & 24 deletions src/libproc_macro_plugin/qquote.rs
@@ -34,8 +34,9 @@ use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::ext::base;
use syntax::ext::proc_macro_shim::build_block_emitter;
use syntax::parse::token::{self, Token, gensym_ident, str_to_ident};
use syntax::parse::token::{self, Token};
use syntax::print::pprust;
use syntax::symbol::Symbol;
use syntax::tokenstream::{TokenTree, TokenStream};

// ____________________________________________________________________________________________
@@ -124,7 +125,7 @@ fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindi
} // produce an error or something first
let exp = vec![exp.unwrap().to_owned()];
debug!("RHS: {:?}", exp.clone());
let new_id = gensym_ident("tmp");
let new_id = Ident::with_empty_ctxt(Symbol::gensym("tmp"));
debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone()));
debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec());
bindings.push((new_id, TokenStream::from_tts(exp)));
@@ -179,7 +180,7 @@ fn unravel_concats(tss: Vec<TokenStream>) -> TokenStream {
};

while let Some(ts) = pushes.pop() {
output = build_fn_call(str_to_ident("concat"),
output = build_fn_call(Ident::from_str("concat"),
concat(concat(ts,
from_tokens(vec![Token::Comma])),
output));
@@ -209,18 +210,19 @@ fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec<QTT>) -> (Bindings, T
// FIXME handle sequence repetition tokens
QTT::QDL(qdl) => {
debug!(" QDL: {:?} ", qdl.tts);
let new_id = gensym_ident("qdl_tmp");
let new_id = Ident::with_empty_ctxt(Symbol::gensym("qdl_tmp"));
let mut cct_rec = convert_complex_tts(cx, qdl.tts);
bindings.append(&mut cct_rec.0);
bindings.push((new_id, cct_rec.1));

let sep = build_delim_tok(qdl.delim);

pushes.push(build_mod_call(vec![str_to_ident("proc_macro_tokens"),
str_to_ident("build"),
str_to_ident("build_delimited")],
concat(from_tokens(vec![Token::Ident(new_id)]),
concat(lex(","), sep))));
pushes.push(build_mod_call(
vec![Ident::from_str("proc_macro_tokens"),
Ident::from_str("build"),
Ident::from_str("build_delimited")],
concat(from_tokens(vec![Token::Ident(new_id)]), concat(lex(","), sep)),
));
}
QTT::QIdent(t) => {
pushes.push(TokenStream::from_tts(vec![t]));
@@ -250,13 +252,13 @@ fn unravel(binds: Bindings) -> TokenStream {

/// Checks if the Ident is `unquote`.
fn is_unquote(id: Ident) -> bool {
let qq = str_to_ident("unquote");
let qq = Ident::from_str("unquote");
id.name == qq.name // We disregard context; unquote is _reserved_
}

/// Checks if the Ident is `quote`.
fn is_qquote(id: Ident) -> bool {
let qq = str_to_ident("qquote");
let qq = Ident::from_str("qquote");
id.name == qq.name // We disregard context; qquote is _reserved_
}

@@ -266,7 +268,8 @@ mod int_build {

use syntax::ast::{self, Ident};
use syntax::codemap::{DUMMY_SP};
use syntax::parse::token::{self, Token, keywords, str_to_ident};
use syntax::parse::token::{self, Token, Lit};
use syntax::symbol::keywords;
use syntax::tokenstream::{TokenTree, TokenStream};

// ____________________________________________________________________________________________
@@ -277,19 +280,19 @@ mod int_build {
build_paren_delimited(build_vec(build_token_tt(t))))
}

pub fn emit_lit(l: token::Lit, n: Option<ast::Name>) -> TokenStream {
pub fn emit_lit(l: Lit, n: Option<ast::Name>) -> TokenStream {
let suf = match n {
Some(n) => format!("Some(ast::Name({}))", n.0),
Some(n) => format!("Some(ast::Name({}))", n.as_u32()),
None => "None".to_string(),
};

let lit = match l {
token::Lit::Byte(n) => format!("Lit::Byte(token::intern(\"{}\"))", n.to_string()),
token::Lit::Char(n) => format!("Lit::Char(token::intern(\"{}\"))", n.to_string()),
token::Lit::Integer(n) => format!("Lit::Integer(token::intern(\"{}\"))", n.to_string()),
token::Lit::Float(n) => format!("Lit::Float(token::intern(\"{}\"))", n.to_string()),
token::Lit::Str_(n) => format!("Lit::Str_(token::intern(\"{}\"))", n.to_string()),
token::Lit::ByteStr(n) => format!("Lit::ByteStr(token::intern(\"{}\"))", n.to_string()),
Lit::Byte(n) => format!("Lit::Byte(Symbol::intern(\"{}\"))", n.to_string()),
Lit::Char(n) => format!("Lit::Char(Symbol::intern(\"{}\"))", n.to_string()),
Lit::Float(n) => format!("Lit::Float(Symbol::intern(\"{}\"))", n.to_string()),
Lit::Str_(n) => format!("Lit::Str_(Symbol::intern(\"{}\"))", n.to_string()),
Lit::Integer(n) => format!("Lit::Integer(Symbol::intern(\"{}\"))", n.to_string()),
Lit::ByteStr(n) => format!("Lit::ByteStr(Symbol::intern(\"{}\"))", n.to_string()),
_ => panic!("Unsupported literal"),
};

@@ -388,9 +391,10 @@ mod int_build {
Token::Underscore => lex("_"),
Token::Literal(lit, sfx) => emit_lit(lit, sfx),
// fix ident expansion information... somehow
Token::Ident(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", ident.name)),
Token::Lifetime(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))",
ident.name)),
Token::Ident(ident) =>
lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
Token::Lifetime(ident) =>
lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
_ => panic!("Unhandled case!"),
}
}
@@ -408,7 +412,7 @@ mod int_build {

/// Takes `input` and returns `vec![input]`.
pub fn build_vec(ts: TokenStream) -> TokenStream {
build_mac_call(str_to_ident("vec"), ts)
build_mac_call(Ident::from_str("vec"), ts)
// tts.clone().to_owned()
}
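
The qquote.rs changes above all apply the same substitution: the old `token::str_to_ident` and `gensym_ident` helpers become `Ident::from_str` and `Ident::with_empty_ctxt(Symbol::gensym(..))`. A minimal before/after sketch, assuming the compiler-internal libsyntax API as it appears in this hunk (the function names below are illustrative):

```rust
use syntax::ast::Ident;
use syntax::symbol::Symbol;

// Fresh, hygienically distinct identifier (was: token::gensym_ident("tmp")).
fn fresh_tmp() -> Ident {
    Ident::with_empty_ctxt(Symbol::gensym("tmp"))
}

// Plain interned identifier (was: token::str_to_ident("concat")).
fn concat_ident() -> Ident {
    Ident::from_str("concat")
}
```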

7 changes: 4 additions & 3 deletions src/libproc_macro_tokens/build.rs
@@ -13,7 +13,8 @@ extern crate syntax_pos;

use syntax::ast::Ident;
use syntax::codemap::DUMMY_SP;
use syntax::parse::token::{self, Token, keywords, str_to_ident};
use syntax::parse::token::{self, Token};
use syntax::symbol::keywords;
use syntax::tokenstream::{self, TokenTree, TokenStream};
use std::rc::Rc;

@@ -43,13 +44,13 @@ pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool {

/// Convert a `&str` into a Token.
pub fn str_to_token_ident(s: &str) -> Token {
Token::Ident(str_to_ident(s))
Token::Ident(Ident::from_str(s))
}

/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that
/// corresponds to it.
pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token {
Token::Ident(str_to_ident(&kw.name().as_str()[..]))
Token::Ident(Ident::from_str(&kw.name().as_str()[..]))
}

// ____________________________________________________________________________________________
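
A hypothetical caller of the two helpers above, using only names that appear elsewhere in this PR (`keywords::SelfType` comes from the lowering hunk further down; `my_helper` is an arbitrary example string):

```rust
use proc_macro_tokens::build::{str_to_token_ident, keyword_to_token_ident};
use syntax::parse::token::Token;
use syntax::symbol::keywords;

// Build identifier tokens from a &str and from a keyword.
fn example_tokens() -> (Token, Token) {
    (str_to_token_ident("my_helper"),
     keyword_to_token_ident(keywords::SelfType))
}
```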
4 changes: 2 additions & 2 deletions src/librustc/hir/check_attr.rs
@@ -64,7 +64,7 @@ impl<'a> CheckAttrVisitor<'a> {
None => continue,
};

let (message, label) = match &*name {
let (message, label) = match &*name.as_str() {
"C" => {
conflicting_reprs += 1;
if target != Target::Struct &&
@@ -120,7 +120,7 @@ impl<'a> CheckAttrVisitor<'a> {
}

fn check_attribute(&self, attr: &ast::Attribute, target: Target) {
let name: &str = &attr.name();
let name: &str = &attr.name().as_str();
match name {
"inline" => self.check_inline(attr, target),
"repr" => self.check_repr(attr, target),
16 changes: 8 additions & 8 deletions src/librustc/hir/lowering.rs
@@ -53,8 +53,8 @@ use syntax::ast::*;
use syntax::errors;
use syntax::ptr::P;
use syntax::codemap::{respan, Spanned};
use syntax::parse::token;
use syntax::std_inject;
use syntax::symbol::{Symbol, keywords};
use syntax::visit::{self, Visitor};
use syntax_pos::Span;

@@ -149,7 +149,7 @@ impl<'a> LoweringContext<'a> {
}

fn str_to_ident(&self, s: &'static str) -> Name {
token::gensym(s)
Symbol::gensym(s)
}

fn with_parent_def<T, F>(&mut self, parent_id: NodeId, f: F) -> T
@@ -400,8 +400,8 @@ impl<'a> LoweringContext<'a> {
// Don't expose `Self` (recovered "keyword used as ident" parse error).
// `rustc::ty` expects `Self` to be only used for a trait's `Self`.
// Instead, use gensym("Self") to create a distinct name that looks the same.
if name == token::keywords::SelfType.name() {
name = token::gensym("Self");
if name == keywords::SelfType.name() {
name = Symbol::gensym("Self");
}

hir::TyParam {
@@ -540,7 +540,7 @@ impl<'a> LoweringContext<'a> {
hir::StructField {
span: f.span,
id: f.id,
name: f.ident.map(|ident| ident.name).unwrap_or(token::intern(&index.to_string())),
name: f.ident.map(|ident| ident.name).unwrap_or(Symbol::intern(&index.to_string())),
vis: self.lower_visibility(&f.vis),
ty: self.lower_ty(&f.ty),
attrs: self.lower_attrs(&f.attrs),
@@ -1189,7 +1189,7 @@ impl<'a> LoweringContext<'a> {
e.span,
hir::PopUnstableBlock,
ThinVec::new());
this.field(token::intern(s), signal_block, ast_expr.span)
this.field(Symbol::intern(s), signal_block, ast_expr.span)
}).collect();
let attrs = ast_expr.attrs.clone();

@@ -1953,9 +1953,9 @@ impl<'a> LoweringContext<'a> {
fn std_path_components(&mut self, components: &[&str]) -> Vec<Name> {
let mut v = Vec::new();
if let Some(s) = self.crate_root {
v.push(token::intern(s));
v.push(Symbol::intern(s));
}
v.extend(components.iter().map(|s| token::intern(s)));
v.extend(components.iter().map(|s| Symbol::intern(s)));
return v;
}
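
The lowering changes route all interning through `syntax::symbol` instead of `syntax::parse::token`. A minimal sketch of the two calls, assuming the internal API shown in this hunk (the helper functions are illustrative):

```rust
use syntax::ast::Name;
use syntax::symbol::Symbol;

// Positional (tuple-struct) fields are named "0", "1", ... by interning on the fly.
fn positional_field_name(index: usize) -> Name {
    Symbol::intern(&index.to_string())
}

// A gensym'd "Self" prints like the keyword but is a distinct symbol, which is
// what the TyParam lowering above relies on.
fn distinct_self() -> Name {
    Symbol::gensym("Self")
}
```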

6 changes: 3 additions & 3 deletions src/librustc/hir/map/def_collector.rs
@@ -19,7 +19,7 @@ use middle::cstore::InlinedItem;
use syntax::ast::*;
use syntax::ext::hygiene::Mark;
use syntax::visit;
use syntax::parse::token::{self, keywords};
use syntax::symbol::{Symbol, keywords};

/// Creates def ids for nodes in the HIR.
pub struct DefCollector<'a> {
@@ -169,7 +169,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {
this.with_parent(variant_def_index, |this| {
for (index, field) in v.node.data.fields().iter().enumerate() {
let name = field.ident.map(|ident| ident.name)
.unwrap_or_else(|| token::intern(&index.to_string()));
.unwrap_or_else(|| Symbol::intern(&index.to_string()));
this.create_def(field.id, DefPathData::Field(name.as_str()));
}

@@ -188,7 +188,7 @@ impl<'a> visit::Visitor for DefCollector<'a> {

for (index, field) in struct_def.fields().iter().enumerate() {
let name = field.ident.map(|ident| ident.name.as_str())
.unwrap_or(token::intern(&index.to_string()).as_str());
.unwrap_or(Symbol::intern(&index.to_string()).as_str());
this.create_def(field.id, DefPathData::Field(name));
}
}
57 changes: 19 additions & 38 deletions src/librustc/hir/map/definitions.rs
Original file line number Diff line number Diff line change
@@ -14,7 +14,7 @@ use std::fmt::Write;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use syntax::ast;
use syntax::parse::token::{self, InternedString};
use syntax::symbol::{Symbol, InternedString};
use ty::TyCtxt;
use util::nodemap::NodeMap;

@@ -115,9 +115,9 @@ impl DefPath {
pub fn to_string(&self, tcx: TyCtxt) -> String {
let mut s = String::with_capacity(self.data.len() * 16);

s.push_str(&tcx.original_crate_name(self.krate));
s.push_str(&tcx.original_crate_name(self.krate).as_str());
s.push_str("/");
s.push_str(&tcx.crate_disambiguator(self.krate));
s.push_str(&tcx.crate_disambiguator(self.krate).as_str());

for component in &self.data {
write!(s,
@@ -137,8 +137,8 @@ impl DefPath {
}

pub fn deterministic_hash_to<H: Hasher>(&self, tcx: TyCtxt, state: &mut H) {
tcx.original_crate_name(self.krate).hash(state);
tcx.crate_disambiguator(self.krate).hash(state);
tcx.original_crate_name(self.krate).as_str().hash(state);
tcx.crate_disambiguator(self.krate).as_str().hash(state);
self.data.hash(state);
}
}
@@ -328,7 +328,7 @@ impl DefPathData {
LifetimeDef(ref name) |
EnumVariant(ref name) |
Binding(ref name) |
Field(ref name) => Some(token::intern(name)),
Field(ref name) => Some(Symbol::intern(name)),

Impl |
CrateRoot |
@@ -343,7 +343,7 @@ impl DefPathData {

pub fn as_interned_str(&self) -> InternedString {
use self::DefPathData::*;
match *self {
let s = match *self {
TypeNs(ref name) |
ValueNs(ref name) |
Module(ref name) |
@@ -353,43 +353,24 @@ impl DefPathData {
EnumVariant(ref name) |
Binding(ref name) |
Field(ref name) => {
name.clone()
}

Impl => {
InternedString::new("{{impl}}")
return name.clone();
}

// note that this does not show up in user printouts
CrateRoot => {
InternedString::new("{{root}}")
}
CrateRoot => "{{root}}",

// note that this does not show up in user printouts
InlinedRoot(_) => {
InternedString::new("{{inlined-root}}")
}

Misc => {
InternedString::new("{{?}}")
}

ClosureExpr => {
InternedString::new("{{closure}}")
}

StructCtor => {
InternedString::new("{{constructor}}")
}

Initializer => {
InternedString::new("{{initializer}}")
}
InlinedRoot(_) => "{{inlined-root}}",

Impl => "{{impl}}",
Misc => "{{?}}",
ClosureExpr => "{{closure}}",
StructCtor => "{{constructor}}",
Initializer => "{{initializer}}",
ImplTrait => "{{impl-Trait}}",
};

ImplTrait => {
InternedString::new("{{impl-Trait}}")
}
}
Symbol::intern(s).as_str()
}

pub fn to_string(&self) -> String {
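
The rewritten `as_interned_str` collects a `&'static str` in the match and interns it once at the end, instead of constructing an `InternedString` in every arm. A reduced sketch of that pattern (the `kind` parameter stands in for the `DefPathData` variants):

```rust
use syntax::symbol::{Symbol, InternedString};

// One intern call at the end; the component strings come from the hunk above.
fn synthetic_component(kind: u8) -> InternedString {
    let s: &'static str = match kind {
        0 => "{{closure}}",
        1 => "{{constructor}}",
        _ => "{{?}}",
    };
    Symbol::intern(s).as_str()
}
```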
5 changes: 2 additions & 3 deletions src/librustc/hir/map/mod.rs
@@ -765,7 +765,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> {
None => return false,
Some((node_id, name)) => (node_id, name),
};
if &part[..] != mod_name.as_str() {
if mod_name != &**part {
return false;
}
cursor = self.map.get_parent(mod_id);
@@ -803,8 +803,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> {
// We are looking at some node `n` with a given name and parent
// id; do their names match what I am seeking?
fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool {
name.as_str() == &self.item_name[..] &&
self.suffix_matches(parent_of_n)
name == &**self.item_name && self.suffix_matches(parent_of_n)
}
}
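
The two hunks above compare a `Name` directly against string data instead of calling `as_str()` on both sides, which presumably relies on a string-based `PartialEq` for `Symbol` added as part of this cleanup. A hedged sketch (the helper is illustrative):

```rust
use syntax::ast::Name;

// Assumes the comparison used above (`mod_name != &**part`,
// `name == &**self.item_name`) also accepts a plain &str.
fn name_matches(name: Name, expected: &str) -> bool {
    name == expected
}
```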
