func (p *localTranslater) loadDocCode(lang, importPath string) []byte {
	// {FS}:/src/importPath/doc_$(lang)_GOOS_GOARCH.go
	// {FS}:/src/importPath/doc_$(lang)_GOARCH.go
	// {FS}:/src/importPath/doc_$(lang)_GOOS.go
	// {FS}:/src/importPath/doc_$(lang).go
	filenames := []string{
		fmt.Sprintf("/src/%s/doc_%s_%s_%s.go", importPath, lang, defaultGodocGoos, defaultGodocGoarch),
		fmt.Sprintf("/src/%s/doc_%s_%s.go", importPath, lang, defaultGodocGoarch),
		fmt.Sprintf("/src/%s/doc_%s_%s.go", importPath, lang, defaultGodocGoos),
		fmt.Sprintf("/src/%s/doc_%s.go", importPath, lang),
	}

	for i := 0; i < len(filenames); i++ {
		// $(GOROOT)/translates/
		if p.fileExists(defaultLocalFS, filenames[i]) {
			docCode, _ := vfs.ReadFile(defaultLocalFS, filenames[i])
			if docCode != nil {
				return docCode
			}
		}
		// $(GOROOT)/
		if p.fileExists(defaultRootFS, filenames[i]) {
			docCode, _ := vfs.ReadFile(defaultRootFS, filenames[i])
			if docCode != nil {
				return docCode
			}
		}
	}
	return nil
}
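// The fileExists helper used above is not shown in this collection. A minimal
// sketch of what it might look like, assuming it simply probes the virtual
// file system with Stat; the body is an assumption for illustration, not the
// original implementation.
func (p *localTranslater) fileExists(fs vfs.FileSystem, name string) bool {
	fi, err := fs.Stat(name)
	return err == nil && !fi.IsDir()
}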
// parseFiles is the helper shared by the exported ParseFiles function and
// method. If the argument template is nil, it is created from the first file.
func parseFiles(fs vfs.FileSystem, t *template.Template, filenames ...string) (*template.Template, error) {
	if len(filenames) == 0 {
		// Not really a problem, but be consistent.
		return nil, fmt.Errorf("vfs/html/vfstemplate: no files named in call to ParseFiles")
	}
	for _, filename := range filenames {
		b, err := vfs.ReadFile(fs, filename)
		if err != nil {
			return nil, err
		}
		s := string(b)
		name := path.Base(filename)
		// First template becomes return value if not already defined,
		// and we use that one for subsequent New calls to associate
		// all the templates together. Also, if this file has the same name
		// as t, this file becomes the contents of t, so
		//     t, err := New(name).Funcs(xxx).ParseFiles(name)
		// works. Otherwise we create a new template associated with t.
		var tmpl *template.Template
		if t == nil {
			t = template.New(name)
		}
		if name == t.Name() {
			tmpl = t
		} else {
			tmpl = t.New(name)
		}
		_, err = tmpl.Parse(s)
		if err != nil {
			return nil, err
		}
	}
	return t, nil
}
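// A minimal usage sketch for parseFiles, assuming two template files on the
// virtual file system; the wrapper name and file paths are illustrative
// assumptions, not part of the original package.
func exampleParseTemplates(fs vfs.FileSystem) (*template.Template, error) {
	// Passing nil makes "root.tmpl" the root template; "page.tmpl" is then
	// associated with it so either can reference the other by name.
	return parseFiles(fs, nil, "/lib/tmpl/root.tmpl", "/lib/tmpl/page.tmpl")
}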
// contents reads and returns the content of the named file
// (from the virtual file system, so for example /doc refers to $GOROOT/doc).
func (c *Corpus) contents(name string) string {
	file, err := vfs.ReadFile(c.fs, name)
	if err != nil {
		log.Panic(err)
	}
	return string(file)
}
// loadCodewalk reads a codewalk from the named XML file.
func loadCodewalk(filename string) (*Codewalk, error) {
	f, err := fs.Open(filename)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	cw := new(Codewalk)
	d := xml.NewDecoder(f)
	d.Entity = xml.HTMLEntity
	err = d.Decode(cw)
	if err != nil {
		return nil, &os.PathError{Op: "parsing", Path: filename, Err: err}
	}

	// Compute file list, evaluate line numbers for addresses.
	m := make(map[string]bool)
	for _, st := range cw.Step {
		i := strings.Index(st.Src, ":")
		if i < 0 {
			i = len(st.Src)
		}
		filename := st.Src[0:i]
		data, err := vfs.ReadFile(fs, filename)
		if err != nil {
			st.Err = err
			continue
		}
		if i < len(st.Src) {
			lo, hi, err := addrToByteRange(st.Src[i+1:], 0, data)
			if err != nil {
				st.Err = err
				continue
			}
			// Expand match to line boundaries.
			for lo > 0 && data[lo-1] != '\n' {
				lo--
			}
			for hi < len(data) && (hi == 0 || data[hi-1] != '\n') {
				hi++
			}
			st.Lo = byteToLine(data, lo)
			st.Hi = byteToLine(data, hi-1)
		}
		st.Data = data
		st.File = filename
		m[filename] = true
	}

	// Make list of files.
	cw.File = make([]string, len(m))
	i := 0
	for f := range m {
		cw.File[i] = f
		i++
	}
	sort.Strings(cw.File)

	return cw, nil
}
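// byteToLine is referenced above but not shown in this collection. A minimal
// sketch, assuming 1-based line numbers counted by the newlines that precede
// the given byte offset; the body is an assumption for illustration, not the
// original implementation.
func byteToLine(data []byte, offset int) int {
	if offset > len(data) {
		offset = len(data)
	}
	return 1 + bytes.Count(data[:offset], []byte{'\n'})
}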
func (p *Presentation) ServeHTMLDoc(w http.ResponseWriter, r *http.Request, abspath, relpath string) {
	// get HTML body contents
	src, err := vfs.ReadFile(p.Corpus.fs, abspath)
	if err != nil {
		log.Printf("ReadFile: %s", err)
		p.ServeError(w, r, relpath, err)
		return
	}

	// if it begins with "<!DOCTYPE " assume it is standalone
	// html that doesn't need the template wrapping.
	if bytes.HasPrefix(src, doctype) {
		w.Write(src)
		return
	}

	// if it begins with a JSON blob, read in the metadata.
	meta, src, err := extractMetadata(src)
	if err != nil {
		log.Printf("decoding metadata %s: %v", relpath, err)
	}

	page := Page{
		Title:    meta.Title,
		Subtitle: meta.Subtitle,
		Share:    allowShare(r),
	}

	// evaluate as template if indicated
	if meta.Template {
		tmpl, err := template.New("main").Funcs(p.TemplateFuncs()).Parse(string(src))
		if err != nil {
			log.Printf("parsing template %s: %v", relpath, err)
			p.ServeError(w, r, relpath, err)
			return
		}
		var buf bytes.Buffer
		if err := tmpl.Execute(&buf, page); err != nil {
			log.Printf("executing template %s: %v", relpath, err)
			p.ServeError(w, r, relpath, err)
			return
		}
		src = buf.Bytes()
	}

	// if it's the language spec, add tags to EBNF productions
	if strings.HasSuffix(abspath, "go_spec.html") {
		var buf bytes.Buffer
		Linkify(&buf, src)
		src = buf.Bytes()
	}

	page.Body = src
	p.ServePage(w, page)
}
func (c *Corpus) parseFile(fset *token.FileSet, filename string, mode parser.Mode) (*ast.File, error) {
	src, err := vfs.ReadFile(c.fs, filename)
	if err != nil {
		return nil, err
	}

	// Temporary ad-hoc fix for issue 5247.
	// TODO(gri) Remove this in favor of a better fix, eventually (see issue 7702).
	replaceLinePrefixCommentsWithBlankLine(src)

	return parser.ParseFile(fset, filename, src, mode)
}
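// replaceLinePrefixCommentsWithBlankLine is referenced above but not shown. A
// minimal sketch, assuming it blanks out "//line " comments that start a line
// so they cannot skew position information during parsing; the body is an
// assumption for illustration, not the original implementation.
func replaceLinePrefixCommentsWithBlankLine(src []byte) {
	linePrefix := []byte("//line ")
	for i := 0; i+len(linePrefix) <= len(src); i++ {
		atLineStart := i == 0 || src[i-1] == '\n'
		if atLineStart && bytes.HasPrefix(src[i:], linePrefix) {
			// Overwrite the whole comment line with spaces, in place.
			for j := i; j < len(src) && src[j] != '\n'; j++ {
				src[j] = ' '
			}
		}
	}
}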
func (p *Presentation) serveTextFile(w http.ResponseWriter, r *http.Request, abspath, relpath, title string) {
	src, err := vfs.ReadFile(p.Corpus.fs, abspath)
	if err != nil {
		log.Printf("ReadFile: %s", err)
		p.ServeError(w, r, relpath, err)
		return
	}

	if r.FormValue("m") == "text" {
		p.ServeText(w, src)
		return
	}

	h := r.FormValue("h")
	s := RangeSelection(r.FormValue("s"))

	var buf bytes.Buffer
	if pathpkg.Ext(abspath) == ".go" {
		// Find markup links for this file (e.g. "/src/fmt/print.go").
		fi := p.Corpus.Analysis.FileInfo(abspath)
		buf.WriteString("<script type='text/javascript'>document.ANALYSIS_DATA = ")
		buf.Write(marshalJSON(fi.Data))
		buf.WriteString(";</script>\n")

		if status := p.Corpus.Analysis.Status(); status != "" {
			buf.WriteString("<a href='/lib/godoc/analysis/help.html'>Static analysis features</a> ")
			// TODO(adonovan): show analysis status at per-file granularity.
			fmt.Fprintf(&buf, "<span style='color: grey'>[%s]</span><br/>", htmlpkg.EscapeString(status))
		}

		buf.WriteString("<pre>")
		formatGoSource(&buf, src, fi.Links, h, s)
		buf.WriteString("</pre>")
	} else {
		buf.WriteString("<pre>")
		FormatText(&buf, src, 1, false, h, s)
		buf.WriteString("</pre>")
	}
	fmt.Fprintf(&buf, `<p><a href="/%s?m=text">View as plain text</a></p>`, htmlpkg.EscapeString(relpath))

	p.ServePage(w, Page{
		Title:    title + " " + relpath,
		Tabtitle: relpath,
		Body:     buf.Bytes(),
		Share:    allowShare(r),
	})
}
func readTemplate(name string) *template.Template {
	if pres == nil {
		panic("no global Presentation set yet")
	}
	path := "lib/godoc/" + name

	// use underlying file system fs to read the template file
	// (cannot use template ParseFile functions directly)
	data, err := vfs.ReadFile(fs, path)
	if err != nil {
		log.Fatal("readTemplate: ", err)
	}
	// be explicit with errors (for app engine use)
	t, err := template.New(name).Funcs(pres.FuncMap()).Parse(string(data))
	if err != nil {
		log.Fatal("readTemplate: ", err)
	}
	return t
}
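// A minimal usage sketch for readTemplate, assuming the global Presentation
// and the virtual file system fs have already been initialized elsewhere; the
// helper name and template file names are illustrative assumptions.
func loadGodocTemplates() (pkgTmpl, errTmpl *template.Template) {
	// Each call reads lib/godoc/<name> through the virtual file system and
	// parses it with the Presentation's function map; on failure readTemplate
	// exits the program via log.Fatal.
	return readTemplate("package.html"), readTemplate("error.html")
}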
// updateMetadata scans $GOROOT/doc for HTML files, reads their metadata,
// and updates the docMetadata map.
func (c *Corpus) updateMetadata() {
	metadata := make(map[string]*Metadata)
	var scan func(string) // scan is recursive
	scan = func(dir string) {
		fis, err := c.fs.ReadDir(dir)
		if err != nil {
			log.Println("updateMetadata:", err)
			return
		}
		for _, fi := range fis {
			name := pathpkg.Join(dir, fi.Name())
			if fi.IsDir() {
				scan(name) // recurse
				continue
			}
			if !strings.HasSuffix(name, ".html") {
				continue
			}
			// Extract metadata from the file.
			b, err := vfs.ReadFile(c.fs, name)
			if err != nil {
				log.Printf("updateMetadata %s: %v", name, err)
				continue
			}
			meta, _, err := extractMetadata(b)
			if err != nil {
				log.Printf("updateMetadata: %s: %v", name, err)
				continue
			}
			// Store relative filesystem path in Metadata.
			meta.filePath = name
			if meta.Path == "" {
				// If no Path, canonical path is actual path.
				meta.Path = meta.filePath
			}
			// Store under both paths.
			metadata[meta.Path] = &meta
			metadata[meta.filePath] = &meta
		}
	}
	scan("/doc")
	c.docMetadata.Set(metadata)
}
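// The Metadata type itself is not shown in this collection. A minimal sketch
// consistent with the fields used above and in ServeHTMLDoc (Title, Subtitle,
// Template, Path, filePath); the field comments are assumptions for
// illustration, not the original declaration.
type Metadata struct {
	Title    string
	Subtitle string
	Template bool   // execute the page content as a template
	Path     string // canonical path for this page
	filePath string // filesystem path of the file, relative to the VFS root
}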
// codewalkFileprint serves requests with ?fileprint=f&lo=lo&hi=hi.
// The filename f has already been retrieved and is passed as an argument.
// Lo and hi are the numbers of the first and last line to highlight
// in the response. This format is used for the middle window pane
// of the codewalk pages. It is a separate iframe and does not get
// the usual godoc HTML wrapper.
func codewalkFileprint(w http.ResponseWriter, r *http.Request, f string) {
	abspath := f
	data, err := vfs.ReadFile(fs, abspath)
	if err != nil {
		log.Print(err)
		pres.ServeError(w, r, f, err)
		return
	}
	lo, _ := strconv.Atoi(r.FormValue("lo"))
	hi, _ := strconv.Atoi(r.FormValue("hi"))
	if hi < lo {
		hi = lo
	}
	lo = lineToByte(data, lo)
	hi = lineToByte(data, hi+1)

	// Put the mark 4 lines before lo, so that the iframe
	// shows a few lines of context before the highlighted
	// section.
	n := 4
	mark := lo
	for ; mark > 0 && n > 0; mark-- {
		if data[mark-1] == '\n' {
			if n--; n == 0 {
				break
			}
		}
	}

	io.WriteString(w, `<style type="text/css">@import "/doc/codewalk/codewalk.css";</style><pre>`)
	template.HTMLEscape(w, data[0:mark])
	io.WriteString(w, "<a name='mark'></a>")
	template.HTMLEscape(w, data[mark:lo])
	if lo < hi {
		io.WriteString(w, "<div class='codewalkhighlight'>")
		template.HTMLEscape(w, data[lo:hi])
		io.WriteString(w, "</div>")
	}
	template.HTMLEscape(w, data[hi:])
	io.WriteString(w, "</pre>")
}
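// lineToByte is the counterpart of the byteToLine sketch above and is likewise
// not shown in this collection. A minimal sketch, assuming 1-based line
// numbers and that line n starts just after the (n-1)th newline; the body is
// an assumption for illustration, not the original implementation.
func lineToByte(data []byte, n int) int {
	if n <= 1 {
		return 0
	}
	offset := 0
	for n > 1 {
		i := bytes.IndexByte(data[offset:], '\n')
		if i < 0 {
			return len(data)
		}
		offset += i + 1
		n--
	}
	return offset
}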
// GetPageInfo returns the PageInfo for a package directory abspath. If mode
// includes ShowSource, an AST of the package files (filtered to exports
// unless NoFiltering is set) is computed (PageInfo.PAst); otherwise package
// documentation (PageInfo.PDoc) is extracted from the AST. If there is no
// corresponding package in the directory, PageInfo.PAst and PageInfo.PDoc are
// nil. If there are no subdirectories, PageInfo.Dirs is nil. If an error
// occurred, PageInfo.Err is set to the respective error but the error is not
// logged.
//
func (h *handlerServer) GetPageInfo(abspath, relpath string, mode PageInfoMode, goos, goarch string) *PageInfo {
	info := &PageInfo{Dirname: abspath}

	// Restrict to the package files that would be used when building
	// the package on this system. This makes sure that if there are
	// separate implementations for, say, Windows vs Unix, we don't
	// jumble them all together.
	// Note: If goos/goarch aren't set, the current binary's GOOS/GOARCH
	// are used.
	ctxt := build.Default
	ctxt.IsAbsPath = pathpkg.IsAbs
	ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
		f, err := h.c.fs.ReadDir(filepath.ToSlash(dir))
		filtered := make([]os.FileInfo, 0, len(f))
		for _, i := range f {
			if mode&NoFiltering != 0 || i.Name() != "internal" {
				filtered = append(filtered, i)
			}
		}
		return filtered, err
	}
	ctxt.OpenFile = func(name string) (r io.ReadCloser, err error) {
		data, err := vfs.ReadFile(h.c.fs, filepath.ToSlash(name))
		if err != nil {
			return nil, err
		}
		return ioutil.NopCloser(bytes.NewReader(data)), nil
	}

	if goos != "" {
		ctxt.GOOS = goos
	}
	if goarch != "" {
		ctxt.GOARCH = goarch
	}

	pkginfo, err := ctxt.ImportDir(abspath, 0)
	// continue if there are no Go source files; we still want the directory info
	if _, nogo := err.(*build.NoGoError); err != nil && !nogo {
		info.Err = err
		return info
	}

	// collect package files
	pkgname := pkginfo.Name
	pkgfiles := append(pkginfo.GoFiles, pkginfo.CgoFiles...)
	if len(pkgfiles) == 0 {
		// Commands written in C have no .go files in the build.
		// Instead, documentation may be found in an ignored file.
		// The file may be ignored via an explicit +build ignore
		// constraint (recommended), or by defining the package
		// documentation (historic).
		pkgname = "main" // assume package main since pkginfo.Name == ""
		pkgfiles = pkginfo.IgnoredGoFiles
	}

	// get package information, if any
	if len(pkgfiles) > 0 {
		// build package AST
		fset := token.NewFileSet()
		files, err := h.c.parseFiles(fset, relpath, abspath, pkgfiles)
		if err != nil {
			info.Err = err
			return info
		}

		// ignore any errors - they are due to unresolved identifiers
		pkg, _ := ast.NewPackage(fset, files, poorMansImporter, nil)

		// extract package documentation
		info.FSet = fset
		if mode&ShowSource == 0 {
			// show extracted documentation
			var m doc.Mode
			if mode&NoFiltering != 0 {
				m |= doc.AllDecls
			}
			if mode&AllMethods != 0 {
				m |= doc.AllMethods
			}
			info.PDoc = doc.New(pkg, pathpkg.Clean(relpath), m) // no trailing '/' in importpath
			if mode&NoTypeAssoc != 0 {
				for _, t := range info.PDoc.Types {
					info.PDoc.Consts = append(info.PDoc.Consts, t.Consts...)
					info.PDoc.Vars = append(info.PDoc.Vars, t.Vars...)
					info.PDoc.Funcs = append(info.PDoc.Funcs, t.Funcs...)
					t.Consts = nil
					t.Vars = nil
					t.Funcs = nil
				}
				// for now we cannot easily sort consts and vars since
				// go/doc.Value doesn't export the order information
				sort.Sort(funcsByName(info.PDoc.Funcs))
			}

			// collect examples
			testfiles := append(pkginfo.TestGoFiles, pkginfo.XTestGoFiles...)
			files, err = h.c.parseFiles(fset, relpath, abspath, testfiles)
			if err != nil {
				log.Println("parsing examples:", err)
			}
			info.Examples = collectExamples(h.c, pkg, files)

			// collect any notes that we want to show
			if info.PDoc.Notes != nil {
				// could regexp.Compile only once per godoc, but probably not worth it
				if rx := h.p.NotesRx; rx != nil {
					for m, n := range info.PDoc.Notes {
						if rx.MatchString(m) {
							if info.Notes == nil {
								info.Notes = make(map[string][]*doc.Note)
							}
							info.Notes[m] = n
						}
					}
				}
			}
		} else {
			// show source code
			// TODO(gri) Consider eliminating export filtering in this mode,
			// or perhaps eliminating the mode altogether.
			if mode&NoFiltering == 0 {
				packageExports(fset, pkg)
			}
			info.PAst = files
		}
		info.IsMain = pkgname == "main"
	}

	// get directory information, if any
	var dir *Directory
	var timestamp time.Time
	if tree, ts := h.c.fsTree.Get(); tree != nil && tree.(*Directory) != nil {
		// directory tree is present; lookup respective directory
		// (may still fail if the file system was updated and the
		// new directory tree has not yet been computed)
		dir = tree.(*Directory).lookup(abspath)
		timestamp = ts
	}
	if dir == nil {
		// no directory tree present (too early after startup or
		// command-line mode); compute one level for this page
		// note: cannot use path filter here because in general
		// it doesn't contain the FSTree path
		dir = h.c.newDirectory(abspath, 1)
		timestamp = time.Now()
	}
	info.Dirs = dir.listing(true, func(path string) bool { return h.includePath(path, mode) })
	info.DirTime = timestamp
	info.DirFlat = mode&FlatDir != 0

	return info
}
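// poorMansImporter is passed to ast.NewPackage above but is not shown in this
// collection. A minimal sketch of a resolver of that shape, which fabricates a
// package object per import path instead of type-checking real packages; the
// body is an assumption for illustration, not necessarily the original.
func poorMansImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) {
	pkg := imports[path]
	if pkg == nil {
		// Use the last path element as the package name
		// (strings.LastIndex returns -1 when there is no "/").
		pkg = ast.NewObj(ast.Pkg, path[strings.LastIndex(path, "/")+1:])
		pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-imports
		imports[path] = pkg
	}
	return pkg, nil
}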
// NewServer constructs a new Server using the specified config.
func NewServer(cfg Config) (*Server, error) {
	present.PlayEnabled = cfg.PlayEnabled

	parse := func(fs vfs.FileSystem, t *template.Template, filenames ...string) (*template.Template, error) {
		if t == nil {
			t = template.New(filenames[0]).Funcs(funcMap)
		} else {
			t = t.Funcs(funcMap)
		}
		for _, name := range filenames {
			data, err := vfs.ReadFile(fs, filepath.ToSlash(filepath.Join(cfg.TemplatePath, name)))
			if err != nil {
				return nil, err
			}
			if _, err := t.Parse(string(data)); err != nil {
				return nil, err
			}
		}
		return t, nil
	}

	s := &Server{cfg: cfg}

	// Parse templates.
	var err error
	s.template.home, err = parse(s.cfg.RootFS, nil, "root.tmpl", "home.tmpl")
	if err != nil {
		return nil, err
	}
	s.template.index, err = parse(s.cfg.RootFS, nil, "root.tmpl", "index.tmpl")
	if err != nil {
		return nil, err
	}
	s.template.article, err = parse(s.cfg.RootFS, nil, "root.tmpl", "article.tmpl")
	if err != nil {
		return nil, err
	}
	s.template.doc, err = parse(s.cfg.RootFS, present.Template(), "doc.tmpl")
	if err != nil {
		return nil, err
	}

	// Load content.
	err = s.loadDocs(s.cfg.ContentPath)
	if err != nil {
		return nil, err
	}

	err = s.renderAtomFeed()
	if err != nil {
		return nil, err
	}

	err = s.renderJSONFeed()
	if err != nil {
		return nil, err
	}

	// Set up content file server.
	s.content = http.StripPrefix(s.cfg.BasePath, http.FileServer(
		httpfs.New(getNameSpace(s.cfg.RootFS, s.cfg.ContentPath)),
	))

	return s, nil
}
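// getNameSpace is used above to expose a subtree of RootFS to the file server
// but is not shown in this collection. A minimal sketch, assuming it mounts
// the given directory at the root of a fresh vfs.NameSpace; the body is an
// assumption for illustration, not necessarily the original implementation.
func getNameSpace(fs vfs.FileSystem, root string) vfs.NameSpace {
	ns := make(vfs.NameSpace)
	ns.Bind("/", fs, root, vfs.BindReplace)
	return ns
}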
// loadDocs reads all content from the provided file system root, renders all
// the articles it finds, adds them to the Server's docs field, computes the
// denormalized docPaths, docTags, and tags fields, and populates the various
// helper fields (Newer, Older, Related) for each Doc.
func (s *Server) loadDocs(root string) error {
	// Read content into docs field.
	const ext = ".article"
	fn := func(fs vfs.FileSystem, p string, info os.FileInfo, err error) error {
		if filepath.Ext(p) != ext {
			return nil
		}
		f, err := fs.Open(p)
		if err != nil {
			return err
		}
		defer f.Close()
		ctx := &present.Context{
			ReadFile: func(filename string) ([]byte, error) {
				return vfs.ReadFile(s.cfg.RootFS, filepath.ToSlash(filename))
			},
		}
		d, err := ctx.Parse(f, p, 0)
		if err != nil {
			return err
		}
		html := new(bytes.Buffer)
		err = d.Render(html, s.template.doc)
		if err != nil {
			return err
		}
		p = p[len(root) : len(p)-len(ext)] // trim root and extension
		p = filepath.ToSlash(p)
		s.docs = append(s.docs, &Doc{
			Doc:       d,
			Path:      filepath.ToSlash(s.cfg.BasePath + p),
			Permalink: s.cfg.BaseURL + p,
			HTML:      template.HTML(html.String()),
		})
		return nil
	}
	err := Walk(s.cfg.RootFS, root, fn)
	if err != nil {
		return err
	}
	sort.Sort(docsByTime(s.docs))

	// Pull out doc paths and tags and put in reverse-associating maps.
	s.docPaths = make(map[string]*Doc)
	s.docTags = make(map[string][]*Doc)
	for _, d := range s.docs {
		s.docPaths[strings.TrimPrefix(d.Path, s.cfg.BasePath)] = d
		for _, t := range d.Tags {
			s.docTags[t] = append(s.docTags[t], d)
		}
	}

	// Pull out unique sorted list of tags.
	for t := range s.docTags {
		s.tags = append(s.tags, t)
	}
	sort.Strings(s.tags)

	// Set up presentation-related fields, Newer, Older, and Related.
	for _, doc := range s.docs {
		// Newer, Older: docs adjacent to doc
		for i := range s.docs {
			if s.docs[i] != doc {
				continue
			}
			if i > 0 {
				doc.Newer = s.docs[i-1]
			}
			if i+1 < len(s.docs) {
				doc.Older = s.docs[i+1]
			}
			break
		}

		// Related: all docs that share tags with doc.
		related := make(map[*Doc]bool)
		for _, t := range doc.Tags {
			for _, d := range s.docTags[t] {
				if d != doc {
					related[d] = true
				}
			}
		}
		for d := range related {
			doc.Related = append(doc.Related, d)
		}
		sort.Sort(docsByTime(doc.Related))
	}

	return nil
}
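// docsByTime is used above for sorting but is not shown in this collection. A
// minimal sketch, assuming each Doc carries a Time field and that newer
// articles should sort first; an assumption for illustration, not necessarily
// the original declaration.
type docsByTime []*Doc

func (s docsByTime) Len() int           { return len(s) }
func (s docsByTime) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
func (s docsByTime) Less(i, j int) bool { return s[i].Time.After(s[j].Time) }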