Example #1
func updateImportsIndex(c redis.Conn, ctx context.Context, oldDoc, newDoc *doc.Package) error {
	// Create a map to store any import change since last time we indexed the package.
	changes := make(map[string]bool)
	for _, p := range oldDoc.Imports {
		if gosrc.IsValidRemotePath(p) {
			changes[p] = true
		}
	}
	for _, p := range newDoc.Imports {
		if gosrc.IsValidRemotePath(p) {
			delete(changes, p)
		}
	}

	// For each import change, re-index that package with updated NumImported.
	// In practice this should not happen often, and when it does, the number of
	// changes is likely to be small.
	for p := range changes {
		id, n, err := pkgIDAndImportCount(c, p)
		if err != nil {
			return err
		}
		if id != "" {
			PutIndex(ctx, nil, id, -1, n)
		}
	}
	return nil
}
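
For reference, a minimal standalone sketch of the filtering pattern the examples above share: keep only import paths that gosrc recognizes as fetchable remote paths. The candidate inputs below are illustrative.

package main

import (
	"fmt"

	"github.com/golang/gddo/gosrc"
)

func main() {
	candidates := []string{
		"github.com/golang/gddo/gosrc", // remote host + path: accepted
		"fmt",                          // standard library path: rejected
		"not a path",                   // malformed: rejected
	}
	for _, p := range candidates {
		if gosrc.IsValidRemotePath(p) {
			fmt.Println("would index:", p)
		}
	}
}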
Example #2
func serveAPISearch(resp http.ResponseWriter, req *http.Request) error {
	q := strings.TrimSpace(req.Form.Get("q"))

	var pkgs []database.Package

	if gosrc.IsValidRemotePath(q) || (strings.Contains(q, "/") && gosrc.IsGoRepoPath(q)) {
		pdoc, _, err := getDoc(q, robotRequest)
		if err == nil && pdoc != nil {
			pkgs = []database.Package{{Path: pdoc.ImportPath, Synopsis: pdoc.Synopsis}}
		}
	}

	if pkgs == nil {
		var err error
		pkgs, err = db.Query(q)
		if err != nil {
			return err
		}
	}

	var data = struct {
		Results []database.Package `json:"results"`
	}{
		pkgs,
	}
	resp.Header().Set("Content-Type", jsonMIMEType)
	return json.NewEncoder(resp).Encode(&data)
}
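
serveAPISearch returns an error and reads req.Form directly, so it is meant to be wrapped by an adapter that parses the form and converts errors into HTTP responses. Below is a minimal sketch of such a wrapper; the errorHandler name and the error-to-status mapping are assumptions, not gddo's actual handler type.

type errorHandler func(http.ResponseWriter, *http.Request) error

func (h errorHandler) ServeHTTP(resp http.ResponseWriter, req *http.Request) {
	// Parse the query string and body so req.Form.Get("q") works inside the handler.
	if err := req.ParseForm(); err != nil {
		http.Error(resp, err.Error(), http.StatusBadRequest)
		return
	}
	if err := h(resp, req); err != nil {
		http.Error(resp, "internal server error", http.StatusInternalServerError)
	}
}

// Usage: http.Handle("/api/search", errorHandler(serveAPISearch))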
Example #3
func serveAPISearch(resp http.ResponseWriter, req *http.Request) error {
	q := strings.TrimSpace(req.Form.Get("q"))

	var pkgs []database.Package

	if gosrc.IsValidRemotePath(q) || (strings.Contains(q, "/") && gosrc.IsGoRepoPath(q)) {
		pdoc, _, err := getDoc(q, apiRequest)
		if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
			pdoc, _, err = getDoc(e.Redirect, robotRequest)
		}
		if err == nil && pdoc != nil {
			pkgs = []database.Package{{Path: pdoc.ImportPath, Synopsis: pdoc.Synopsis}}
		}
	}

	if pkgs == nil {
		var err error
		ctx := appengine.NewContext(req)
		pkgs, err = database.Search(ctx, q)
		if err != nil {
			return err
		}
	}

	var data = struct {
		Results []database.Package `json:"results"`
	}{
		pkgs,
	}
	resp.Header().Set("Content-Type", jsonMIMEType)
	return json.NewEncoder(resp).Encode(&data)
}
Example #4
func (db *Database) AddNewCrawl(importPath string) error {
	if !gosrc.IsValidRemotePath(importPath) {
		return errors.New("bad path")
	}
	c := db.Pool.Get()
	defer c.Close()
	_, err := addCrawlScript.Do(c, importPath)
	return err
}
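
addCrawlScript is a redigo redis.Script whose Lua body is not shown in these examples. The sketch below shows how such a script could be declared; the key names and logic are hypothetical illustrations, not gddo's actual script.

var addCrawlScript = redis.NewScript(0, `
	for i = 1, #ARGV do
		-- Queue the path for crawling only if it is not already known.
		if redis.call('HEXISTS', 'ids', ARGV[i]) == 0 then
			redis.call('ZADD', 'newCrawl', 0, ARGV[i])
		end
	end
	return 0
`)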
Example #5
// addReferences adds packages referenced in plain text s.
func addReferences(references map[string]bool, s []byte) {
	for _, pat := range referencesPats {
		for _, m := range pat.FindAllSubmatch(s, -1) {
			p := string(m[1])
			if gosrc.IsValidRemotePath(p) {
				references[p] = true
			}
		}
	}
}
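
referencesPats is defined elsewhere; the loop assumes each pattern's first capture group yields a candidate import path. Here is a self-contained sketch with an illustrative pattern (not gddo's actual referencesPats) and a small driver.

package main

import (
	"fmt"
	"regexp"

	"github.com/golang/gddo/gosrc"
)

// Illustrative only: match tokens that look like host/path import paths.
var referencesPats = []*regexp.Regexp{
	regexp.MustCompile(`\b([a-z0-9.\-]+\.[a-z]{2,}/[\w./\-]+)`),
}

// addReferences as in the example above.
func addReferences(references map[string]bool, s []byte) {
	for _, pat := range referencesPats {
		for _, m := range pat.FindAllSubmatch(s, -1) {
			p := string(m[1])
			if gosrc.IsValidRemotePath(p) {
				references[p] = true
			}
		}
	}
}

func main() {
	refs := make(map[string]bool)
	addReferences(refs, []byte("See github.com/golang/gddo/gosrc for the fetcher."))
	fmt.Println(refs) // map[github.com/golang/gddo/gosrc:true]
}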
Example #6
func serveHome(resp http.ResponseWriter, req *http.Request) error {
	if req.URL.Path != "/" {
		return servePackage(resp, req)
	}

	q := strings.TrimSpace(req.Form.Get("q"))
	if q == "" {
		pkgs, err := popular()
		if err != nil {
			return err
		}

		return executeTemplate(resp, "home"+templateExt(req), http.StatusOK, nil,
			map[string]interface{}{"Popular": pkgs})
	}

	if path, ok := isBrowseURL(q); ok {
		q = path
	}

	if gosrc.IsValidRemotePath(q) || (strings.Contains(q, "/") && gosrc.IsGoRepoPath(q)) {
		pdoc, pkgs, err := getDoc(q, queryRequest)
		if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
			http.Redirect(resp, req, "/"+e.Redirect, http.StatusFound)
			return nil
		}
		if err == nil && (pdoc != nil || len(pkgs) > 0) {
			http.Redirect(resp, req, "/"+q, http.StatusFound)
			return nil
		}
	}

	ctx := appengine.NewContext(req)
	pkgs, err := database.Search(ctx, q)
	if err != nil {
		return err
	}
	if gceLogger != nil {
		// Log up to the top 10 packages served for this search.
		logPkgs := pkgs
		if len(pkgs) > 10 {
			logPkgs = pkgs[:10]
		}
		gceLogger.LogEvent(resp, req, logPkgs)
	}

	return executeTemplate(resp, "results"+templateExt(req), http.StatusOK, nil,
		map[string]interface{}{"q": q, "pkgs": pkgs})
}
Example #7
func serveHome(resp http.ResponseWriter, req *http.Request) error {
	if req.URL.Path != "/" {
		return servePackage(resp, req)
	}

	q := strings.TrimSpace(req.Form.Get("q"))
	if q == "" {
		pkgs, err := popular()
		if err != nil {
			return err
		}

		return executeTemplate(resp, "home"+templateExt(req), http.StatusOK, nil,
			map[string]interface{}{"Popular": pkgs})
	}

	if path, ok := isBrowseURL(q); ok {
		q = path
	}

	if gosrc.IsValidRemotePath(q) || (strings.Contains(q, "/") && gosrc.IsGoRepoPath(q)) {
		pdoc, pkgs, err := getDoc(q, queryRequest)
		if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
			http.Redirect(resp, req, "/"+e.Redirect, http.StatusFound)
			return nil
		}
		if err == nil && (pdoc != nil || len(pkgs) > 0) {
			http.Redirect(resp, req, "/"+q, http.StatusFound)
			return nil
		}
	}

	pkgs, err := db.Query(q)
	if err != nil {
		return err
	}

	return executeTemplate(resp, "results"+templateExt(req), http.StatusOK, nil,
		map[string]interface{}{"q": q, "pkgs": pkgs})
}
Example #8
// Put adds the package documentation to the database.
func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time, hide bool) error {
	c := db.Pool.Get()
	defer c.Close()

	score := 0.0
	if !hide {
		score = documentScore(pdoc)
	}
	terms := documentTerms(pdoc, score)

	var gobBuf bytes.Buffer
	if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
		return err
	}

	// Truncate large documents.
	if gobBuf.Len() > 200000 {
		pdocNew := *pdoc
		pdoc = &pdocNew
		pdoc.Truncated = true
		pdoc.Vars = nil
		pdoc.Funcs = nil
		pdoc.Types = nil
		pdoc.Consts = nil
		pdoc.Examples = nil
		gobBuf.Reset()
		if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
			return err
		}
	}

	gobBytes, err := snappy.Encode(nil, gobBuf.Bytes())
	if err != nil {
		return err
	}

	kind := "p"
	switch {
	case pdoc.Name == "":
		kind = "d"
	case pdoc.IsCmd:
		kind = "c"
	}

	t := int64(0)
	if !nextCrawl.IsZero() {
		t = nextCrawl.Unix()
	}

	_, err = putScript.Do(c, pdoc.ImportPath, pdoc.Synopsis, score, gobBytes, strings.Join(terms, " "), pdoc.Etag, kind, t)
	if err != nil {
		return err
	}

	if nextCrawl.IsZero() {
		// Skip crawling related packages if this is not a full save.
		return nil
	}

	paths := make(map[string]bool)
	for _, p := range pdoc.Imports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	for _, p := range pdoc.TestImports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	for _, p := range pdoc.XTestImports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	if pdoc.ImportPath != pdoc.ProjectRoot && pdoc.ProjectRoot != "" {
		paths[pdoc.ProjectRoot] = true
	}
	for _, p := range pdoc.Subdirectories {
		paths[pdoc.ImportPath+"/"+p] = true
	}

	args := make([]interface{}, 0, len(paths))
	for p := range paths {
		args = append(args, p)
	}
	_, err = addCrawlScript.Do(c, args...)
	return err
}
Example #9
// Put adds the package documentation to the database.
func (db *Database) Put(pdoc *doc.Package, nextCrawl time.Time, hide bool) error {
	c := db.Pool.Get()
	defer c.Close()

	score := 0.0
	if !hide {
		score = documentScore(pdoc)
	}
	terms := documentTerms(pdoc, score)

	var gobBuf bytes.Buffer
	if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
		return err
	}

	gobBytes := snappy.Encode(nil, gobBuf.Bytes())

	// Truncate large documents.
	if len(gobBytes) > 400000 {
		pdocNew := *pdoc
		pdoc = &pdocNew
		pdoc.Truncated = true
		pdoc.Vars = nil
		pdoc.Funcs = nil
		pdoc.Types = nil
		pdoc.Consts = nil
		pdoc.Examples = nil
		gobBuf.Reset()
		if err := gob.NewEncoder(&gobBuf).Encode(pdoc); err != nil {
			return err
		}
		gobBytes = snappy.Encode(nil, gobBuf.Bytes())
	}

	kind := "p"
	switch {
	case pdoc.Name == "":
		kind = "d"
	case pdoc.IsCmd:
		kind = "c"
	}

	t := int64(0)
	if !nextCrawl.IsZero() {
		t = nextCrawl.Unix()
	}

	// Get the old version of the package to extract its imports.
	// If the package does not exist, both old and err will be nil.
	old, _, err := db.getDoc(c, pdoc.ImportPath)
	if err != nil {
		return err
	}

	_, err = putScript.Do(c, pdoc.ImportPath, pdoc.Synopsis, score, gobBytes, strings.Join(terms, " "), pdoc.Etag, kind, t)
	if err != nil {
		return err
	}

	id, n, err := pkgIDAndImportCount(c, pdoc.ImportPath)
	if err != nil {
		return err
	}
	ctx := bgCtx()

	if score > 0 {
		if err := PutIndex(ctx, pdoc, id, score, n); err != nil {
			log.Printf("Cannot put %q in index: %v", pdoc.ImportPath, err)
		}

		if old != nil {
			if err := updateImportsIndex(c, ctx, old, pdoc); err != nil {
				return err
			}
		}
	} else {
		if err := deleteIndex(ctx, id); err != nil {
			return err
		}
	}

	if nextCrawl.IsZero() {
		// Skip crawling related packages if this is not a full save.
		return nil
	}

	paths := make(map[string]bool)
	for _, p := range pdoc.Imports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	for _, p := range pdoc.TestImports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	for _, p := range pdoc.XTestImports {
		if gosrc.IsValidRemotePath(p) {
			paths[p] = true
		}
	}
	if pdoc.ImportPath != pdoc.ProjectRoot && pdoc.ProjectRoot != "" {
		paths[pdoc.ProjectRoot] = true
	}
	for _, p := range pdoc.Subdirectories {
		paths[pdoc.ImportPath+"/"+p] = true
	}

	args := make([]interface{}, 0, len(paths))
	for p := range paths {
		args = append(args, p)
	}
	_, err = addCrawlScript.Do(c, args...)
	return err
}
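
For context on the storage format: Put gob-encodes the document and compresses it with snappy before handing the bytes to putScript. Below is a minimal round-trip sketch under that assumption, using a stand-in struct instead of doc.Package and the newer github.com/golang/snappy API where Encode returns only []byte.

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
	"log"

	"github.com/golang/snappy"
)

type pkgDoc struct {
	ImportPath string
	Synopsis   string
}

func main() {
	in := pkgDoc{
		ImportPath: "github.com/golang/gddo/gosrc",
		Synopsis:   "Package gosrc fetches Go package source code.",
	}

	// Encode to gob, then compress with snappy, as Put does before storing.
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(in); err != nil {
		log.Fatal(err)
	}
	compressed := snappy.Encode(nil, buf.Bytes())

	// Reverse the transformation when reading the value back.
	raw, err := snappy.Decode(nil, compressed)
	if err != nil {
		log.Fatal(err)
	}
	var out pkgDoc
	if err := gob.NewDecoder(bytes.NewReader(raw)).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d gob bytes -> %d compressed bytes, round trip: %+v\n",
		buf.Len(), len(compressed), out)
}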