// Put adds the package documentation to the database.
func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time, hide bool) error {
	c := db.Pool.Get()
	defer c.Close()

	// Hidden packages get a zero score so they do not surface in search.
	score := 0.0
	if !hide {
		score = documentScore(pdoc)
	}
	terms := documentTerms(pdoc, score)

	var gobBuf bytes.Buffer
	if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
		return err
	}

	gobBytes := snappy.Encode(nil, gobBuf.Bytes())

	// Truncate large documents by dropping their member documentation
	// and re-encoding before storage.
	if len(gobBytes) > 400000 {
		pdocNew := *pdoc
		pdoc = &pdocNew
		pdoc.Truncated = true
		pdoc.Vars = nil
		pdoc.Funcs = nil
		pdoc.Types = nil
		pdoc.Consts = nil
		pdoc.Examples = nil
		gobBuf.Reset()
		if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
			return err
		}
		gobBytes = snappy.Encode(nil, gobBuf.Bytes())
	}

	// Classify the document: "p" for a package, "d" for a directory
	// with no Go files, "c" for a command.
	kind := "p"
	switch {
	case pdoc.Name == "":
		kind = "d"
	case pdoc.IsCmd:
		kind = "c"
	}

	t := int64(0)
	if !nextCrawl.IsZero() {
		t = nextCrawl.Unix()
	}

	// Get the old version of the package to extract its imports.
	// If the package does not exist, both old and err will be nil.
	old, _, err := db.getDoc(c, pdoc.ImportPath)
	if err != nil {
		return err
	}

	_, err = putScript.Do(c, pdoc.ImportPath, pdoc.Synopsis, score, gobBytes, strings.Join(terms, " "), pdoc.Etag, kind, t)
	if err != nil {
		return err
	}

	id, n, err := pkgIDAndImportCount(c, pdoc.ImportPath)
	if err != nil {
		return err
	}

	// Keep the search index in sync: index scored packages and their
	// import changes, and remove unscored packages from the index.
	ctx := bgCtx()
	if score > 0 {
		if err := PutIndex(ctx, pdoc, id, score, n); err != nil {
			log.Printf("Cannot put %q in index: %v", pdoc.ImportPath, err)
		}
		if old != nil {
			if err := updateImportsIndex(c, ctx, old, pdoc); err != nil {
				return err
			}
		}
	} else {
		if err := deleteIndex(ctx, id); err != nil {
			return err
		}
	}

	if nextCrawl.IsZero() {
		// Skip crawling related packages if this is not a full save.
		return nil
	}

	// Collect the unique, valid remote paths referenced by this package
	// so that they can be queued for crawling.
	paths := make(map[string]bool)
	for _, p := range pdoc.Imports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	for _, p := range pdoc.TestImports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	for _, p := range pdoc.XTestImports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	if pdoc.ImportPath != pdoc.ProjectRoot && pdoc.ProjectRoot != "" {
		paths[pdoc.ProjectRoot] = true
	}
	for _, p := range pdoc.Subdirectories {
		paths[pdoc.ImportPath+"/"+p] = true
	}

	args := make([]interface{}, 0, len(paths))
	for p := range paths {
		args = append(args, p)
	}
	_, err = addCrawlScript.Do(c, args...)
	return err
}
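
// A minimal usage sketch (hypothetical caller; the fetched *doc.Package
// and the crawl interval are assumptions, not part of this file):
//
//	if err := db.Put(pdoc, time.Now().Add(24*time.Hour), false); err != nil {
//		log.Printf("cannot store %s: %v", pdoc.ImportPath, err)
//	}
//
// Passing a zero time.Time for nextCrawl stores the document without
// queueing its imports and subdirectories for crawling.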