auto merge of #9636 : alexcrichton/rust/rustdoc, r=huonw

Commits have all the juicy details.

Important thing to note in this pull request is that `rustdoc html crate.rs` becomes `rustdoc crate.rs`
This commit is contained in:
bors 2013-10-01 04:31:31 -07:00
commit 0dce112af7
5 changed files with 298 additions and 117 deletions

View File

@ -3,49 +3,84 @@
rustdoc \- generate documentation from Rust source code
.SH SYNOPSIS
.B rustdoc
[\fIOPTIONS\fR] \fICRATEFILE\fR
[\fIOPTIONS\fR] \fIINPUT\fR
.SH DESCRIPTION
This tool generates API reference documentation by extracting comments from
source code written in the Rust language, available at <\fBhttps://www.rust-
lang.org\fR>. It provides several output formats for the generated
documentation.
source code written in the Rust language, available at
<\fBhttps://www.rust-lang.org\fR>. It accepts several input formats and provides
several output formats for the generated documentation.
.SH COMMANDS
.SH OPTIONS
.TP
--output-dir <val>
Put documents here (default: .)
-r --input-format <val>
html or json (default: inferred)
.TP
--output-format <val>
markdown or html (default: html)
-w --output-format <val>
html or json (default: html)
.TP
--output-style <val>
doc-per-crate or doc-per-mod (default: doc-per-mod)
-o --output <val>
where to place the output (default: doc/ for html, doc.json for json)
.TP
--pandoc-cmd <val>
Command for running pandoc
--passes <val>
space-separated list of passes to run (default: '')
.TP
--no-defaults
don't run the default passes
.TP
--plugins <val>
space-separated list of plugins to run (default: '')
.TP
--plugin-path <val>
directory to load plugins from (default: /tmp/rustdoc_ng/plugins)
.TP
-L --library-path <val>
directory to add to crate search path
.TP
-h, --help
Print help
.SH "OUTPUT FORMATS"
The rustdoc tool can generate documentation in either the Markdown
or HTML formats. It requires the pandoc tool
<\fBhttp://johnmacfarlane.net/pandoc/\fR> for conversion features.
The rustdoc tool can generate output in either an HTML or JSON format.
If using an HTML format, then the specified output destination will be the root
directory of an HTML structure for all the documentation. Pages will be placed
into this directory, and source files will also possibly be rendered into it as
well.
If using a JSON format, then the specified output destination will have the
rustdoc output serialized as JSON into it. This output format exists to
pre-compile documentation for crates, and for usage in non-rustdoc tools. The
JSON output is the following hash:
{
"schema": VERSION,
"crate": ...,
"plugins": ...,
}
The schema version indicates what the structure of crate/plugins will look
like. Within a schema version the structure will remain the same. The `crate`
field will contain all relevant documentation for the source being documented,
and the `plugins` field will contain the output of the plugins run over the
crate.
.SH "EXAMPLES"
To generate documentation for the source in the current directory:
$ rustdoc hello.rs
To build documentation into a subdirectory named 'doc' in the Markdown
format:
$ rustdoc --output-dir doc --output-format markdown hello.rs
List all available passes that rustdoc has, along with default passes:
$ rustdoc --passes list
The generated HTML can be viewed with any standard web browser, while
the Markdown version is well-suited for conversion into other formats.
To precompile the documentation for a crate, and then use it to render html at
a later date:
$ rustdoc -w json hello.rs
$ rustdoc doc.json
The generated HTML can be viewed with any standard web browser.
.SH "SEE ALSO"

View File

@ -227,7 +227,7 @@ RUSTDOC = $(HBIN2_H_$(CFG_BUILD_TRIPLE))/rustdoc$(X_$(CFG_BUILD_TRIPLE))
define libdoc
doc/$(1)/index.html: $$(RUSTDOC) $$(TLIB2_T_$(3)_H_$(3))/$(CFG_STDLIB_$(3))
@$$(call E, rustdoc: $$@)
$(Q)$(RUSTDOC) html $(2)
$(Q)$(RUSTDOC) $(2)
DOCS += doc/$(1)/index.html
endef

View File

@ -28,6 +28,7 @@ use std::vec;
use extra::arc::RWArc;
use extra::json::ToJson;
use extra::sort;
use extra::time;
use syntax::ast;
use syntax::ast_util::is_local;
@ -60,7 +61,7 @@ struct Cache {
// typaram id => name of that typaram
typarams: HashMap<ast::NodeId, ~str>,
// type id => all implementations for that type
impls: HashMap<ast::NodeId, ~[clean::Impl]>,
impls: HashMap<ast::NodeId, ~[(clean::Impl, Option<~str>)]>,
// path id => (full qualified path, shortty) -- used to generate urls
paths: HashMap<ast::NodeId, (~[~str], &'static str)>,
// trait id => method name => dox
@ -76,7 +77,7 @@ struct Cache {
struct SourceCollector<'self> {
seen: HashSet<~str>,
dst: Path,
cx: &'self Context,
cx: &'self mut Context,
}
struct Item<'self> { cx: &'self Context, item: &'self clean::Item, }
@ -179,7 +180,9 @@ pub fn run(mut crate: clean::Crate, dst: Path) {
w.flush();
}
if cx.include_sources {
info2!("emitting source files");
let started = time::precise_time_ns();
{
let dst = cx.dst.push("src");
mkdir(&dst);
let dst = dst.push(crate.name);
@ -187,13 +190,18 @@ pub fn run(mut crate: clean::Crate, dst: Path) {
let mut folder = SourceCollector {
dst: dst,
seen: HashSet::new(),
cx: &cx,
cx: &mut cx,
};
crate = folder.fold_crate(crate);
}
let ended = time::precise_time_ns();
info2!("Took {:.03f}s", (ended as f64 - started as f64) / 1e9f64);
// Now render the whole crate.
info2!("rendering the whole crate");
let started = time::precise_time_ns();
cx.crate(crate, cache);
let ended = time::precise_time_ns();
info2!("Took {:.03f}s", (ended as f64 - started as f64) / 1e9f64);
}
fn write(dst: Path, contents: &str) {
@ -229,16 +237,28 @@ fn clean_srcpath(src: &str, f: &fn(&str)) {
impl<'self> DocFolder for SourceCollector<'self> {
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
if !self.seen.contains(&item.source.filename) {
self.emit_source(item.source.filename);
if self.cx.include_sources && !self.seen.contains(&item.source.filename) {
// If it turns out that we couldn't read this file, then we probably
// can't read any of the files (generating html output from json or
// something like that), so just don't include sources for the
// entire crate. The other option is maintaining this mapping on a
// per-file basis, but that's probably not worth it...
self.cx.include_sources = self.emit_source(item.source.filename);
self.seen.insert(item.source.filename.clone());
if !self.cx.include_sources {
println!("warning: source code was requested to be rendered, \
but `{}` is a missing source file.",
item.source.filename);
println!(" skipping rendering of source code");
}
}
self.fold_item_recur(item)
}
}
impl<'self> SourceCollector<'self> {
fn emit_source(&self, filename: &str) {
fn emit_source(&mut self, filename: &str) -> bool {
let p = Path(filename);
// Read the contents of the file
@ -251,7 +271,11 @@ impl<'self> SourceCollector<'self> {
// If we couldn't open this file, then just returns because it
// probably means that it's some standard library macro thing and we
// can't have the source to it anyway.
let mut r = match r { Some(r) => r, None => return };
let mut r = match r {
Some(r) => r,
// eew macro hacks
None => return filename == "<std-macros>"
};
// read everything
loop {
@ -273,7 +297,8 @@ impl<'self> SourceCollector<'self> {
}
let dst = cur.push(*p.components.last() + ".html");
let mut w = dst.open_writer(io::CreateOrTruncate);
let w = dst.open_writer(io::CreateOrTruncate);
let mut w = BufferedWriter::new(w);
let title = format!("{} -- source", *dst.components.last());
let page = layout::Page {
@ -283,6 +308,8 @@ impl<'self> SourceCollector<'self> {
};
layout::render(&mut w as &mut io::Writer, &self.cx.layout,
&page, &(""), &Source(contents.as_slice()));
w.flush();
return true;
}
}
@ -427,21 +454,34 @@ impl DocFolder for Cache {
// implementations elsewhere
let ret = match self.fold_item_recur(item) {
Some(item) => {
match item.inner {
clean::ImplItem(i) => {
match item {
clean::Item{ attrs, inner: clean::ImplItem(i), _ } => {
match i.for_ {
clean::ResolvedPath { did, _ } if is_local(did) => {
let id = did.node;
let v = do self.impls.find_or_insert_with(id) |_| {
~[]
};
v.push(i);
// extract relevant documentation for this impl
match attrs.move_iter().find(|a| {
match *a {
clean::NameValue(~"doc", _) => true,
_ => false
}
}) {
Some(clean::NameValue(_, dox)) => {
v.push((i, Some(dox)));
}
Some(*) | None => {
v.push((i, None));
}
}
}
_ => {}
}
None
}
_ => Some(item),
i => Some(i),
}
}
i => i,
@ -1050,10 +1090,24 @@ fn render_method(w: &mut io::Writer, meth: &clean::Item, withlink: bool) {
fn item_struct(w: &mut io::Writer, it: &clean::Item, s: &clean::Struct) {
write!(w, "<pre class='struct'>");
render_struct(w, it, Some(&s.generics), s.struct_type, s.fields, "");
render_struct(w, it, Some(&s.generics), s.struct_type, s.fields, "", true);
write!(w, "</pre>");
document(w, it);
match s.struct_type {
doctree::Plain => {
write!(w, "<h2 class='fields'>Fields</h2>\n<table>");
for field in s.fields.iter() {
write!(w, "<tr><td id='structfield.{name}'>\
<code>{name}</code></td><td>",
name = field.name.get_ref().as_slice());
document(w, field);
write!(w, "</td></tr>");
}
write!(w, "</table>");
}
_ => {}
}
render_methods(w, it);
}
@ -1067,34 +1121,46 @@ fn item_enum(w: &mut io::Writer, it: &clean::Item, e: &clean::Enum) {
} else {
write!(w, " \\{\n");
for v in e.variants.iter() {
let name = format!("<a name='variant.{0}'>{0}</a>",
v.name.get_ref().as_slice());
write!(w, " ");
let name = v.name.get_ref().as_slice();
match v.inner {
clean::VariantItem(ref var) => {
match var.kind {
clean::CLikeVariant => write!(w, " {},\n", name),
clean::CLikeVariant => write!(w, "{}", name),
clean::TupleVariant(ref tys) => {
write!(w, " {}(", name);
write!(w, "{}(", name);
for (i, ty) in tys.iter().enumerate() {
if i > 0 { write!(w, ", ") }
write!(w, "{}", *ty);
}
write!(w, "),\n");
write!(w, ")");
}
clean::StructVariant(ref s) => {
render_struct(w, v, None, s.struct_type, s.fields,
" ");
" ", false);
}
}
}
_ => unreachable!()
}
write!(w, ",\n");
}
write!(w, "\\}");
}
write!(w, "</pre>");
document(w, it);
if e.variants.len() > 0 {
write!(w, "<h2 class='variants'>Variants</h2>\n<table>");
for variant in e.variants.iter() {
write!(w, "<tr><td id='variant.{name}'><code>{name}</code></td><td>",
name = variant.name.get_ref().as_slice());
document(w, variant);
write!(w, "</td></tr>");
}
write!(w, "</table>");
}
render_methods(w, it);
}
@ -1102,9 +1168,11 @@ fn render_struct(w: &mut io::Writer, it: &clean::Item,
g: Option<&clean::Generics>,
ty: doctree::StructType,
fields: &[clean::Item],
tab: &str) {
write!(w, "{}struct {}",
tab: &str,
structhead: bool) {
write!(w, "{}{}{}",
VisSpace(it.visibility),
if structhead {"struct "} else {""},
it.name.get_ref().as_slice());
match g {
Some(g) => write!(w, "{}", *g),
@ -1112,16 +1180,15 @@ fn render_struct(w: &mut io::Writer, it: &clean::Item,
}
match ty {
doctree::Plain => {
write!(w, " \\{\n");
write!(w, " \\{\n{}", tab);
for field in fields.iter() {
match field.inner {
clean::StructFieldItem(ref ty) => {
write!(w, " {}<a name='structfield.{name}'>{name}</a>: \
{},\n{}",
write!(w, " {}{}: {},\n{}",
VisSpace(field.visibility),
field.name.get_ref().as_slice(),
ty.type_,
tab,
name = field.name.get_ref().as_slice());
tab);
}
_ => unreachable!()
}
@ -1151,22 +1218,26 @@ fn render_methods(w: &mut io::Writer, it: &clean::Item) {
do cache.read |c| {
match c.impls.find(&it.id) {
Some(v) => {
let mut non_trait = v.iter().filter(|i| i.trait_.is_none());
let mut non_trait = v.iter().filter(|p| {
p.n0_ref().trait_.is_none()
});
let non_trait = non_trait.to_owned_vec();
let mut traits = v.iter().filter(|i| i.trait_.is_some());
let mut traits = v.iter().filter(|p| {
p.n0_ref().trait_.is_some()
});
let traits = traits.to_owned_vec();
if non_trait.len() > 0 {
write!(w, "<h2 id='methods'>Methods</h2>");
for &i in non_trait.iter() {
render_impl(w, i);
for &(ref i, ref dox) in non_trait.move_iter() {
render_impl(w, i, dox);
}
}
if traits.len() > 0 {
write!(w, "<h2 id='implementations'>Trait \
Implementations</h2>");
for &i in traits.iter() {
render_impl(w, i);
for &(ref i, ref dox) in traits.move_iter() {
render_impl(w, i, dox);
}
}
}
@ -1176,7 +1247,7 @@ fn render_methods(w: &mut io::Writer, it: &clean::Item) {
}
}
fn render_impl(w: &mut io::Writer, i: &clean::Impl) {
fn render_impl(w: &mut io::Writer, i: &clean::Impl, dox: &Option<~str>) {
write!(w, "<h3 class='impl'><code>impl{} ", i.generics);
let trait_id = match i.trait_ {
Some(ref ty) => {
@ -1189,6 +1260,13 @@ fn render_impl(w: &mut io::Writer, i: &clean::Impl) {
None => None
};
write!(w, "{}</code></h3>", i.for_);
match *dox {
Some(ref dox) => {
write!(w, "<div class='docblock'>{}</div>",
Markdown(dox.as_slice()));
}
None => {}
}
write!(w, "<div class='methods'>");
for meth in i.methods.iter() {
write!(w, "<h4 id='method.{}' class='method'><code>",

View File

@ -224,8 +224,8 @@
}, 20);
});
$(document).off('keyup.searchnav');
$(document).on('keyup.searchnav', function (e) {
$(document).off('keypress.searchnav');
$(document).on('keypress.searchnav', function (e) {
var $active = $results.filter('.highlighted');
if (e.keyCode === 38) { // up
@ -321,8 +321,8 @@
output += "</p>";
$('#main.content').addClass('hidden');
$('#search.content').removeClass('hidden').html(output);
$('.search-results .desc').width($('.content').width() - 40 -
$('.content td:first-child').first().width());
$('#search .desc').width($('#search').width() - 40 -
$('#search td:first-child').first().width());
initSearchNav();
}

View File

@ -21,13 +21,15 @@ extern mod syntax;
extern mod rustc;
extern mod extra;
use extra::serialize::Encodable;
use extra::time;
use extra::getopts::groups;
use std::cell::Cell;
use std::rt::io;
use std::rt::io::Writer;
use std::rt::io::file::FileInfo;
use std::rt::io;
use extra::getopts;
use extra::getopts::groups;
use extra::json;
use extra::serialize::{Decodable, Encodable};
use extra::time;
pub mod clean;
pub mod core;
@ -70,9 +72,7 @@ static DEFAULT_PASSES: &'static [&'static str] = &[
local_data_key!(pub ctxtkey: @core::DocContext)
enum OutputFormat {
HTML, JSON
}
type Output = (clean::Crate, ~[plugins::PluginJson]);
pub fn main() {
std::os::set_exit_status(main_args(std::os::args()));
@ -81,6 +81,12 @@ pub fn main() {
pub fn opts() -> ~[groups::OptGroup] {
use extra::getopts::groups::*;
~[
optflag("h", "help", "show this help message"),
optopt("r", "input-format", "the input type of the specified file",
"[rust|json]"),
optopt("w", "output-format", "the output type to write",
"[html|json]"),
optopt("o", "output", "where to place the output", "PATH"),
optmulti("L", "library-path", "directory to add to crate search path",
"DIR"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
@ -89,32 +95,22 @@ pub fn opts() -> ~[groups::OptGroup] {
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
optflag("h", "help", "show this help message"),
optflag("", "nodefaults", "don't run the default passes"),
optopt("o", "output", "where to place the output", "PATH"),
optflag("", "no-defaults", "don't run the default passes"),
]
}
pub fn usage(argv0: &str) {
println(groups::usage(format!("{} [options] [html|json] <crate>",
argv0), opts()));
println(groups::usage(format!("{} [options] <input>", argv0), opts()));
}
pub fn main_args(args: &[~str]) -> int {
//use extra::getopts::groups::*;
let matches = groups::getopts(args.tail(), opts()).unwrap();
if matches.opt_present("h") || matches.opt_present("help") {
usage(args[0]);
return 0;
}
let mut default_passes = !matches.opt_present("nodefaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
if passes == ~[~"list"] {
if matches.opt_strs("passes") == ~[~"list"] {
println("Available passes for running rustdoc:");
for &(name, _, description) in PASSES.iter() {
println!("{:>20s} - {}", name, description);
@ -126,26 +122,69 @@ pub fn main_args(args: &[~str]) -> int {
return 0;
}
let (format, cratefile) = match matches.free.clone() {
[~"json", crate] => (JSON, crate),
[~"html", crate] => (HTML, crate),
[s, _] => {
println!("Unknown output format: `{}`", s);
usage(args[0]);
return 1;
}
[_, .._] => {
println!("Expected exactly one crate to process");
usage(args[0]);
return 1;
}
_ => {
println!("Expected an output format and then one crate");
usage(args[0]);
let (crate, res) = match acquire_input(&matches) {
Ok(pair) => pair,
Err(s) => {
println!("input error: {}", s);
return 1;
}
};
info2!("going to format");
let started = time::precise_time_ns();
let output = matches.opt_str("o").map(|s| Path(*s));
match matches.opt_str("w") {
Some(~"html") | None => {
html::render::run(crate, output.unwrap_or(Path("doc")))
}
Some(~"json") => {
json_output(crate, res, output.unwrap_or(Path("doc.json")))
}
Some(s) => {
println!("unknown output format: {}", s);
return 1;
}
}
let ended = time::precise_time_ns();
info2!("Took {:.03f}s", (ended as f64 - started as f64) / 1e9f64);
return 0;
}
/// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(matches: &getopts::Matches) -> Result<Output, ~str> {
if matches.free.len() == 0 {
return Err(~"expected an input file to act on");
} if matches.free.len() > 1 {
return Err(~"only one input file may be specified");
}
let input = matches.free[0].as_slice();
match matches.opt_str("r") {
Some(~"rust") => Ok(rust_input(input, matches)),
Some(~"json") => json_input(input),
Some(s) => Err("unknown input format: " + s),
None => {
if input.ends_with(".json") {
json_input(input)
} else {
Ok(rust_input(input, matches))
}
}
}
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins");
// First, parse the crate and extract all relevant information.
let libs = Cell::new(matches.opt_strs("L").map(|s| Path(*s)));
let cr = Cell::new(Path(cratefile));
@ -188,7 +227,8 @@ pub fn main_args(args: &[~str]) -> int {
}
// Load all plugins/passes into a PluginManager
let mut pm = plugins::PluginManager::new(Path("/tmp/rustdoc_ng/plugins"));
let path = matches.opt_str("plugin-path").unwrap_or(~"/tmp/rustdoc_ng/plugins");
let mut pm = plugins::PluginManager::new(Path(path));
for pass in passes.iter() {
let plugin = match PASSES.iter().position(|&(p, _, _)| p == *pass) {
Some(i) => PASSES[i].n1(),
@ -206,45 +246,73 @@ pub fn main_args(args: &[~str]) -> int {
// Run everything!
info2!("Executing passes/plugins");
let (crate, res) = pm.run_plugins(crate);
info2!("going to format");
let started = time::precise_time_ns();
let output = matches.opt_str("o").map(|s| Path(*s));
match format {
HTML => { html::render::run(crate, output.unwrap_or(Path("doc"))) }
JSON => { jsonify(crate, res, output.unwrap_or(Path("doc.json"))) }
}
let ended = time::precise_time_ns();
info2!("Took {:.03f}s", (ended as f64 - started as f64) / 1000000000f64);
return 0;
return pm.run_plugins(crate);
}
fn jsonify(crate: clean::Crate, res: ~[plugins::PluginJson], dst: Path) {
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, ~str> {
let input = match ::std::io::file_reader(&Path(input)) {
Ok(i) => i,
Err(s) => return Err(s),
};
match json::from_reader(input) {
Err(s) => Err(s.to_str()),
Ok(json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.pop(&~"schema") {
Some(json::String(version)) => {
if version.as_slice() != SCHEMA_VERSION {
return Err(format!("sorry, but I only understand \
version {}", SCHEMA_VERSION))
}
}
Some(*) => return Err(~"malformed json"),
None => return Err(~"expected a schema version"),
}
let crate = match obj.pop(&~"crate") {
Some(json) => {
let mut d = json::Decoder(json);
Decodable::decode(&mut d)
}
None => return Err(~"malformed json"),
};
// XXX: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
let plugin_output = ~[];
Ok((crate, plugin_output))
}
Ok(*) => Err(~"malformed json input: expected an object at the top"),
}
}
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(crate: clean::Crate, res: ~[plugins::PluginJson], dst: Path) {
// {
// "schema": version,
// "crate": { parsed crate ... },
// "plugins": { output of plugins ... }
// }
let mut json = ~extra::treemap::TreeMap::new();
json.insert(~"schema", extra::json::String(SCHEMA_VERSION.to_owned()));
json.insert(~"schema", json::String(SCHEMA_VERSION.to_owned()));
let plugins_json = ~res.move_iter().filter_map(|opt| opt).collect();
// FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode
// straight to the Rust JSON representation.
let crate_json_str = do std::io::with_str_writer |w| {
crate.encode(&mut extra::json::Encoder(w));
crate.encode(&mut json::Encoder(w));
};
let crate_json = match extra::json::from_str(crate_json_str) {
let crate_json = match json::from_str(crate_json_str) {
Ok(j) => j,
Err(_) => fail2!("Rust generated JSON is invalid??")
};
json.insert(~"crate", crate_json);
json.insert(~"plugins", extra::json::Object(plugins_json));
json.insert(~"plugins", json::Object(plugins_json));
let mut file = dst.open_writer(io::Create).unwrap();
let output = extra::json::Object(json).to_str();
let output = json::Object(json).to_str();
file.write(output.as_bytes());
}