// contents reads and returns the content of the named file
// (from the virtual file system, so for example /doc refers to $GOROOT/doc).
func (c *Corpus) contents(name string) string {
	file, err := vfs.ReadFile(c.fs, name)
	if err != nil {
		log.Panic(err)
	}
	return string(file)
}

func (c *Corpus) parseFile(fset *token.FileSet, filename string, mode parser.Mode) (*ast.File, error) {
	src, err := vfs.ReadFile(c.fs, filename)
	if err != nil {
		return nil, err
	}

	// Temporary ad-hoc fix for issue 5247.
	// TODO(gri) Remove this in favor of a better fix, eventually (see issue 7702).
	replaceLinePrefixCommentsWithBlankLine(src)

	return parser.ParseFile(fset, filename, src, mode)
}

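// The sketch below is illustrative only and not part of the original source:
// it shows how contents and parseFile resolve names against the Corpus's
// virtual file system rather than the host file system. It assumes the mapfs
// helper (golang.org/x/tools/godoc/vfs/mapfs) is imported; the function name
// and file contents are made up for the example.
func exampleParseFromVirtualFS() {
	c := NewCorpus(mapfs.New(map[string]string{
		"doc/hello.go": "// Package hello is a tiny example.\npackage hello\n",
	}))
	fmt.Println(c.contents("/doc/hello.go")) // raw file contents as a string

	fset := token.NewFileSet()
	f, err := c.parseFile(fset, "/doc/hello.go", parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(f.Name.Name) // "hello"
}
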
func (p *Presentation) ServeHTMLDoc(w http.ResponseWriter, r *http.Request, abspath, relpath string) {
	// get HTML body contents
	src, err := vfs.ReadFile(p.Corpus.fs, abspath)
	if err != nil {
		log.Printf("ReadFile: %s", err)
		p.ServeError(w, r, relpath, err)
		return
	}

	// if it begins with "<!DOCTYPE " assume it is standalone
	// html that doesn't need the template wrapping.
	if bytes.HasPrefix(src, doctype) {
		w.Write(src)
		return
	}

	// if it begins with a JSON blob, read in the metadata.
	meta, src, err := extractMetadata(src)
	if err != nil {
		log.Printf("decoding metadata %s: %v", relpath, err)
	}

	// evaluate as template if indicated
	if meta.Template {
		tmpl, err := template.New("main").Funcs(p.TemplateFuncs()).Parse(string(src))
		if err != nil {
			log.Printf("parsing template %s: %v", relpath, err)
			p.ServeError(w, r, relpath, err)
			return
		}
		var buf bytes.Buffer
		if err := tmpl.Execute(&buf, nil); err != nil {
			log.Printf("executing template %s: %v", relpath, err)
			p.ServeError(w, r, relpath, err)
			return
		}
		src = buf.Bytes()
	}

	// if it's the language spec, add tags to EBNF productions
	if strings.HasSuffix(abspath, "go_spec.html") {
		var buf bytes.Buffer
		Linkify(&buf, src)
		src = buf.Bytes()
	}

	p.ServePage(w, Page{
		Title:    meta.Title,
		Subtitle: meta.Subtitle,
		Body:     src,
	})
}

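// Illustrative sketch, not part of the original source: the "JSON blob"
// mentioned above is assumed to be a leading <!--{ ... }--> comment holding
// JSON, as in the pages under $GOROOT/doc. The sample text is made up; the
// Metadata fields used (Title, Template) are the ones ServeHTMLDoc reads.
func exampleExtractMetadata() {
	src := []byte(`<!--{"Title": "Example Page", "Template": true}-->
<p>{{.}}</p>`)
	meta, body, err := extractMetadata(src)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(meta.Title, meta.Template) // "Example Page true"
	fmt.Println(string(body))              // the remaining HTML, here a template to execute
}
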
func (p *Presentation) serveTextFile(w http.ResponseWriter, r *http.Request, abspath, relpath, title string) {
	src, err := vfs.ReadFile(p.Corpus.fs, abspath)
	if err != nil {
		log.Printf("ReadFile: %s", err)
		p.ServeError(w, r, relpath, err)
		return
	}

	if r.FormValue("m") == "text" {
		p.ServeText(w, src)
		return
	}

	h := r.FormValue("h")
	s := RangeSelection(r.FormValue("s"))

	var buf bytes.Buffer
	if pathpkg.Ext(abspath) == ".go" {
		// Find markup links for this file (e.g. "/src/pkg/fmt/print.go").
		fi := p.Corpus.Analysis.FileInfo(abspath)
		buf.WriteString("<script type='text/javascript'>document.ANALYSIS_DATA = ")
		buf.Write(marshalJSON(fi.Data))
		buf.WriteString(";</script>\n")

		if status := p.Corpus.Analysis.Status(); status != "" {
			buf.WriteString("<a href='/lib/godoc/analysis/help.html'>Static analysis features</a> ")
			// TODO(adonovan): show analysis status at per-file granularity.
			fmt.Fprintf(&buf, "<span style='color: grey'>[%s]</span><br/>", htmlpkg.EscapeString(status))
		}

		buf.WriteString("<pre>")
		formatGoSource(&buf, src, fi.Links, h, s)
		buf.WriteString("</pre>")
	} else {
		buf.WriteString("<pre>")
		FormatText(&buf, src, 1, false, h, s)
		buf.WriteString("</pre>")
	}
	fmt.Fprintf(&buf, `<p><a href="/%s?m=text">View as plain text</a></p>`, htmlpkg.EscapeString(relpath))

	p.ServePage(w, Page{
		Title:    title + " " + relpath,
		Tabtitle: relpath,
		Body:     buf.Bytes(),
	})
}

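// Illustrative sketch, not part of the original source: the ?h= and ?s=
// query parameters above map to FormatText's pattern and selection
// arguments. The sample source, the byte range, and the "from:to" selection
// syntax are assumptions made for the example.
func exampleHighlight() {
	src := []byte("package main\n\nfunc main() {}\n")
	var buf bytes.Buffer
	buf.WriteString("<pre>")
	FormatText(&buf, src, 1, true, "main", RangeSelection("14:18"))
	buf.WriteString("</pre>")
	fmt.Println(buf.String()) // HTML with line-number spans, "main" highlighted, and the selected bytes marked
}
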
// updateMetadata scans $GOROOT/doc for HTML files, reads their metadata,
// and updates the docMetadata map.
func (c *Corpus) updateMetadata() {
	metadata := make(map[string]*Metadata)
	var scan func(string) // scan is recursive
	scan = func(dir string) {
		fis, err := c.fs.ReadDir(dir)
		if err != nil {
			log.Println("updateMetadata:", err)
			return
		}
		for _, fi := range fis {
			name := pathpkg.Join(dir, fi.Name())
			if fi.IsDir() {
				scan(name) // recurse
				continue
			}
			if !strings.HasSuffix(name, ".html") {
				continue
			}
			// Extract metadata from the file.
			b, err := vfs.ReadFile(c.fs, name)
			if err != nil {
				log.Printf("updateMetadata %s: %v", name, err)
				continue
			}
			meta, _, err := extractMetadata(b)
			if err != nil {
				log.Printf("updateMetadata: %s: %v", name, err)
				continue
			}
			// Store relative filesystem path in Metadata.
			meta.filePath = name
			if meta.Path == "" {
				// If no Path, canonical path is actual path.
				meta.Path = meta.filePath
			}
			// Store under both paths.
			metadata[meta.Path] = &meta
			metadata[meta.filePath] = &meta
		}
	}
	scan("/doc")
	c.docMetadata.Set(metadata)
}

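// Illustrative sketch, not part of the original source: after updateMetadata
// runs, a page is reachable under both its canonical Path (from the JSON
// header) and its file path under /doc. The mapfs-backed Corpus and the file
// contents below are assumptions made for the example.
func exampleUpdateMetadata() {
	c := NewCorpus(mapfs.New(map[string]string{
		"doc/spec.html": `<!--{"Title": "Spec", "Path": "/ref/spec"}-->
<h1>The Spec</h1>`,
	}))
	c.updateMetadata()

	md, _ := c.docMetadata.Get()
	for path, meta := range md.(map[string]*Metadata) {
		fmt.Println(path, "->", meta.Title) // both /ref/spec and /doc/spec.html map to "Spec"
	}
}
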
// GetPageInfo returns the PageInfo for a package directory abspath. If the
// ShowSource mode bit is set, an AST containing only the package exports is
// computed (PageInfo.PAst); otherwise package documentation (PageInfo.PDoc)
// is extracted from the AST. If there is no corresponding package in the
// directory, PageInfo.PAst and PageInfo.PDoc are nil. If there are no sub-
// directories, PageInfo.Dirs is nil. If an error occurred, PageInfo.Err is
// set to the respective error but the error is not logged.
//
func (h *handlerServer) GetPageInfo(abspath, relpath string, mode PageInfoMode) *PageInfo {
	info := &PageInfo{Dirname: abspath}

	// Restrict to the package files that would be used when building
	// the package on this system. This makes sure that if there are
	// separate implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	// Note: Uses current binary's GOOS/GOARCH.
	// To use different pair, such as if we allowed the user to choose,
	// set ctxt.GOOS and ctxt.GOARCH before calling ctxt.ImportDir.
	ctxt := build.Default
	ctxt.IsAbsPath = pathpkg.IsAbs
	ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
		return h.c.fs.ReadDir(filepath.ToSlash(dir))
	}
	ctxt.OpenFile = func(name string) (r io.ReadCloser, err error) {
		data, err := vfs.ReadFile(h.c.fs, filepath.ToSlash(name))
		if err != nil {
			return nil, err
		}
		return ioutil.NopCloser(bytes.NewReader(data)), nil
	}
	pkginfo, err := ctxt.ImportDir(abspath, 0)
	// continue if there are no Go source files; we still want the directory info
	if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
		info.Err = err
		return info
	}

	// collect package files
	pkgname := pkginfo.Name
	pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
	if len(pkgfiles) == 0 {
		// Commands written in C have no .go files in the build.
		// Instead, documentation may be found in an ignored file.
		// The file may be ignored via an explicit +build ignore
		// constraint (recommended), or by defining the package
		// documentation (historic).
		pkgname = "main" // assume package main since pkginfo.Name == ""
		pkgfiles = pkginfo.IgnoredGoFiles
	}

	// get package information, if any
	if len(pkgfiles) > 0 {
		// build package AST
		fset := token.NewFileSet()
		files, err := h.c.parseFiles(fset, relpath, abspath, pkgfiles)
		if err != nil {
			info.Err = err
			return info
		}

		// ignore any errors - they are due to unresolved identifiers
		pkg, _ := ast.NewPackage(fset, files, poorMansImporter, nil)

		// extract package documentation
		info.FSet = fset
		if mode&ShowSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&NoFiltering != 0 {
				m |= doc.AllDecls
			}
			if mode&AllMethods != 0 {
				m |= doc.AllMethods
			}
			info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
			if mode&NoTypeAssoc != 0 {
				for _, t := range info.PDoc.Types {
					info.PDoc.Consts = append(info.PDoc.Consts, t.Consts...)
					info.PDoc.Vars = append(info.PDoc.Vars, t.Vars...)
					info.PDoc.Funcs = append(info.PDoc.Funcs, t.Funcs...)
					t.Consts = nil
					t.Vars = nil
					t.Funcs = nil
				}
				// for now we cannot easily sort consts and vars since
				// go/doc.Value doesn't export the order information
				sort.Sort(funcsByName(info.PDoc.Funcs))
			}

			// collect examples
			testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
			files, err = h.c.parseFiles(fset, relpath, abspath, testfiles)
			if err != nil {
				log.Println("parsing examples:", err)
			}
			info.Examples = collectExamples(h.c, pkg, files)

			// collect any notes that we want to show
			if info.PDoc.Notes != nil {
				// could regexp.Compile only once per godoc, but probably not worth it
				if rx := h.p.NotesRx; rx != nil {
					for m, n := range info.PDoc.Notes {
						if rx.MatchString(m) {
							if info.Notes == nil {
								info.Notes = make(map[string][]*doc.Note)
							}
							info.Notes[m] = n
						}
					}
				}
			}

		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			// or perhaps eliminating the mode altogether.
			if mode&NoFiltering == 0 {
				packageExports(fset, pkg)
			}
			info.PAst = files
		}
		info.IsMain = pkgname == "main"
	}

	// get directory information, if any
	var dir *Directory
	var timestamp time.Time
	if tree, ts := h.c.fsTree.Get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		// it doesn't contain the FSTree path
		dir = h.c.newDirectory(abspath, 1)
		timestamp = time.Now()
	}
	info.Dirs = dir.listing(true)
	info.DirTime = timestamp
	info.DirFlat = mode&FlatDir != 0

	return info
}

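// Illustrative sketch, not part of the original source: the documentation
// branch above ultimately reduces to go/doc. This standalone example shows
// the same extraction step on an in-memory file using only the standard
// library plus poorMansImporter from this package; the source text and
// import path are made up.
func exampleDocExtraction() {
	const src = `// Package demo is a tiny example.
package demo

// Exported is documented.
func Exported() {}

func unexported() {}
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}
	// As in GetPageInfo, resolution errors are ignored.
	pkg, _ := ast.NewPackage(fset, map[string]*ast.File{"demo.go": f}, poorMansImporter, nil)

	// doc.AllDecls corresponds to the NoFiltering mode bit above;
	// without it only Exported would appear in d.Funcs.
	d := doc.New(pkg, "example.com/demo", doc.AllDecls)
	for _, fn := range d.Funcs {
		fmt.Println(fn.Name, "-", strings.TrimSpace(fn.Doc))
	}
}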