Update rustdoc_ng to syntax and metadata changes
emberian committed Sep 16, 2013
1 parent be2f85e commit c1d977a
Showing 4 changed files with 54 additions and 57 deletions.
103 changes: 50 additions & 53 deletions src/rustdoc_ng/clean.rs
@@ -3,7 +3,6 @@

use its = syntax::parse::token::ident_to_str;

use rustc::metadata::{csearch,decoder,cstore};
use syntax;
use syntax::ast;

@@ -500,7 +499,7 @@ impl Clean<Type> for ast::Ty {
let t = match self.node {
ty_nil => Unit,
ty_ptr(ref m) => RawPointer(m.mutbl.clean(), ~resolve_type(&m.ty.clean())),
ty_rptr(ref l, ref m) =>
ty_rptr(ref l, ref m) =>
BorrowedRef {lifetime: l.clean(), mutability: m.mutbl.clean(),
type_: ~resolve_type(&m.ty.clean())},
ty_box(ref m) => Managed(m.mutbl.clean(), ~resolve_type(&m.ty.clean())),
@@ -666,17 +665,32 @@ impl Clean<~str> for syntax::codemap::Span {

#[deriving(Clone, Encodable, Decodable)]
pub struct Path {
name: ~str,
lifetime: Option<Lifetime>,
typarams: ~[Type]
global: bool,
segments: ~[PathSegment],
}

impl Clean<Path> for ast::Path {
fn clean(&self) -> Path {
Path {
name: path_to_str(self),
lifetime: self.rp.clean(),
typarams: self.types.clean(),
global: self.global,
segments: self.segments.clean()
}
}
}

#[deriving(Clone, Encodable, Decodable)]
pub struct PathSegment {
name: ~str,
lifetime: Option<Lifetime>,
types: ~[Type],
}

impl Clean<PathSegment> for ast::PathSegment {
fn clean(&self) -> PathSegment {
PathSegment {
name: self.identifier.clean(),
lifetime: self.lifetime.clean(),
types: self.types.clean()
}
}
}
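The hunk above follows libsyntax's reshaping of ast::Path: a cleaned path is no longer a flat name with one optional lifetime and one list of type parameters, but a global flag plus a vector of segments, each carrying its own name, lifetime, and types. A rough illustration of the new shape in the same 2013-era dialect as the diff; the path and all field values below are made up for illustration, not taken from real rustdoc_ng output:

// Hypothetical example: how a path like `std::vec::with_capacity`
// would populate the new segmented Path after cleaning.
let example = Path {
    global: false,               // no leading `::`
    segments: ~[
        PathSegment { name: ~"std", lifetime: None, types: ~[] },
        PathSegment { name: ~"vec", lifetime: None, types: ~[] },
        PathSegment { name: ~"with_capacity", lifetime: None, types: ~[] },
    ],
};

Generic arguments and lifetimes, when present, now attach to the segment they belong to rather than to the path as a whole.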
@@ -686,7 +700,7 @@ fn path_to_str(p: &ast::Path) -> ~str {

let mut s = ~"";
let mut first = true;
for i in p.idents.iter().map(|x| interner_get(x.name)) {
for i in p.segments.iter().map(|x| interner_get(x.identifier.name)) {
if !first || p.global {
s.push_str("::");
} else {
@@ -899,7 +913,7 @@ impl ToSource for syntax::codemap::Span {
fn lit_to_str(lit: &ast::lit) -> ~str {
match lit.node {
ast::lit_str(st) => st.to_owned(),
ast::lit_int(ch, ast::ty_char) => ~"'" + ch.to_str() + "'",
ast::lit_char(c) => ~"'" + std::char::from_u32(c).unwrap().to_str() + "'",
ast::lit_int(i, _t) => i.to_str(),
ast::lit_uint(u, _t) => u.to_str(),
ast::lit_int_unsuffixed(i) => i.to_str(),
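For context on the new lit_char arm above: character literals now carry their code point as a u32, which is why the code goes through std::char::from_u32 before printing. A tiny standalone illustration (not rustdoc_ng code, values made up):

// A char literal's payload is a u32 code point; from_u32 recovers the char.
let c: u32 = 0x61;    // 'a'
let printed = ~"'" + std::char::from_u32(c).unwrap().to_str() + "'";
assert!(printed == ~"'a'");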
@@ -966,7 +980,7 @@ fn resolve_type(t: &Type) -> Type {

let def_id = match *d {
DefFn(i, _) => i,
DefSelf(i, _) | DefSelfTy(i) => return Self(i),
DefSelf(i) | DefSelfTy(i) => return Self(i),
DefTy(i) => i,
DefTrait(i) => {
debug!("saw DefTrait in def_to_id");
@@ -979,58 +993,41 @@
},
DefTyParam(i, _) => return Generic(i.node),
DefStruct(i) => i,
DefTyParamBinder(i) => {
DefTyParamBinder(i) => {
debug!("found a typaram_binder, what is it? %d", i);
return TyParamBinder(i);
},
x => fail!("resolved type maps to a weird def %?", x),
};

if def_id.crate != ast::CRATE_NODE_ID {
use rustc::metadata::decoder::*;

let sess = local_data::get(super::ctxtkey, |x| *x.unwrap()).sess;
let mut path = ~"";
let mut ty = ~"";
do csearch::each_path(sess.cstore, def_id.crate) |pathstr, deflike, _vis| {
match deflike {
decoder::DlDef(di) => {
let d2 = match di {
DefFn(i, _) | DefTy(i) | DefTrait(i) |
DefStruct(i) | DefMod(i) => Some(i),
_ => None,
};
if d2.is_some() {
let d2 = d2.unwrap();
if def_id.node == d2.node {
debug!("found external def: %?", di);
path = pathstr.to_owned();
ty = match di {
DefFn(*) => ~"fn",
DefTy(*) => ~"enum",
DefTrait(*) => ~"trait",
DefPrimTy(p) => match p {
ty_str => ~"str",
ty_bool => ~"bool",
ty_int(t) => match t.to_str() {
~"" => ~"i",
s => s
},
ty_uint(t) => t.to_str(),
ty_float(t) => t.to_str()
},
DefTyParam(*) => ~"generic",
DefStruct(*) => ~"struct",
DefTyParamBinder(*) => ~"typaram_binder",
x => fail!("resolved external maps to a weird def %?", x),
};

}
}
let cratedata = ::rustc::metadata::cstore::get_crate_data(sess.cstore, def_id.crate);
let doc = lookup_item(def_id.node, cratedata.data);
let path = syntax::ast_map::path_to_str_with_sep(item_path(doc), "::", sess.intr());
let ty = match def_like_to_def(item_to_def_like(doc, def_id, def_id.crate)) {
DefFn(*) => ~"fn",
DefTy(*) => ~"enum",
DefTrait(*) => ~"trait",
DefPrimTy(p) => match p {
ty_str => ~"str",
ty_bool => ~"bool",
ty_int(t) => match t.to_str() {
~"" => ~"i",
s => s
},
_ => (),
};
true
ty_uint(t) => t.to_str(),
ty_float(t) => t.to_str(),
ty_char => ~"char",
},
DefTyParam(*) => ~"generic",
DefStruct(*) => ~"struct",
DefTyParamBinder(*) => ~"typaram_binder",
x => fail!("resolved external maps to a weird def %?", x),
};
let cname = cstore::get_crate_data(sess.cstore, def_id.crate).name.to_owned();
let cname = cratedata.name.to_owned();
External(cname + "::" + path, ty)
} else {
ResolvedPath {path: path.clone(), typarams: tpbs.clone(), id: def_id.node}
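The final clean.rs hunk replaces the old csearch::each_path scan, which walked every item in the external crate and compared node ids, with a direct lookup in the crate's metadata. A minimal sketch of the new flow, using only the calls that appear in the diff (get_crate_data, lookup_item, item_path, path_to_str_with_sep, item_to_def_like, def_like_to_def); the wrapper function, its name, and its signature are assumptions for illustration, with `sess` obtained as in clean.rs:

// Sketch only: resolve an external def_id to "cratename::path" plus a kind string.
// The function name and signature are hypothetical; the calls mirror the diff above.
fn external_path(sess: Session, def_id: ast::def_id) -> (~str, ~str) {
    use rustc::metadata::decoder::*;
    // Metadata for the external crate is already loaded in the crate store.
    let cratedata = ::rustc::metadata::cstore::get_crate_data(sess.cstore, def_id.crate);
    // Jump straight to the item's metadata document instead of iterating every path.
    let doc = lookup_item(def_id.node, cratedata.data);
    // Rebuild "a::b::c" from the stored ast_map path.
    let path = syntax::ast_map::path_to_str_with_sep(item_path(doc), "::", sess.intr());
    // Classify the def so the renderer knows what kind of item it links to.
    let ty = match def_like_to_def(item_to_def_like(doc, def_id, def_id.crate)) {
        DefFn(*) => ~"fn",
        DefTrait(*) => ~"trait",
        DefStruct(*) => ~"struct",
        _ => ~"type",    // the real code above handles more cases and fails on unknown defs
    };
    (cratedata.name.to_owned() + "::" + path, ty)
}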
2 changes: 1 addition & 1 deletion src/rustdoc_ng/core.rs
@@ -42,7 +42,7 @@ fn get_ast_and_resolve(cpath: &Path, libs: ~[Path]) -> DocContext {
syntax::diagnostic::emit,
span_diagnostic_handler);

let mut cfg = build_configuration(sess, @"rustdoc_ng", &input);
let mut cfg = build_configuration(sess);
cfg.push(@dummy_spanned(ast::MetaWord(@"stage2")));

let mut crate = phase_1_parse_input(sess, cfg.clone(), &input);
2 changes: 1 addition & 1 deletion src/rustdoc_ng/fold.rs
@@ -1,6 +1,6 @@
use std;
use clean::*;
use std::iterator::Extendable;
use std::iter::Extendable;

pub trait DocFolder {
fn fold_item(&mut self, item: Item) -> Option<Item> {
4 changes: 2 additions & 2 deletions src/rustdoc_ng/passes.rs
@@ -173,7 +173,7 @@ fn clean_comment_body(s: ~str) -> ~str {
1 => return lines[0].slice_from(2).trim().to_owned(),
_ => (),
}

let mut ol = std::vec::with_capacity(lines.len());
for line in lines.clone().move_iter() {
// replace meaningless things with a single newline
@@ -184,7 +184,7 @@
}
}
let li = longest_common_prefix(ol.clone());

let x = ol.iter()
.filter(|x| { debug!("cleaning line: %s", **x); true })
.map(|x| if x.len() == 0 { ~"" } else { x.slice_chars(li, x.char_len()).to_owned() })
