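// The package clause, imports, and the package-level cDB variable are not
// shown in this listing. The block below is an assumed reconstruction based
// on the daviddengcn/gcse repository layout; the exact import paths may
// differ.
package main

import (
	"log"
	"time"

	"github.com/daviddengcn/gcse"
	"github.com/daviddengcn/gcse/configs"
	sppb "github.com/daviddengcn/gcse/proto/spider"
	"github.com/daviddengcn/gcse/spider/github"
	"github.com/daviddengcn/gcse/store"
	"github.com/daviddengcn/gcse/utils"
	bi "github.com/daviddengcn/go-easybi"
	"github.com/daviddengcn/sophie"
	"github.com/daviddengcn/sophie/kv"
)

// cDB is the shared crawler database handle, assigned in main.
var cDB *gcse.CrawlerDB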
func main() {
	log.Println("Running tocrawl tool, to generate crawling list")
	log.Println("NonCrawlHosts: ", configs.NonCrawlHosts)
	log.Println("CrawlGithubUpdate: ", configs.CrawlGithubUpdate)
	log.Println("CrawlByGodocApi: ", configs.CrawlByGodocApi)
	log.Printf("Using personal: %v", configs.CrawlerGithubPersonal)

	gcse.GithubSpider = github.NewSpiderWithToken(configs.CrawlerGithubPersonal)

	// Load the crawler database (package and person tables).
	cDB = gcse.LoadCrawlerDB()

	if configs.CrawlGithubUpdate || configs.CrawlByGodocApi {
		// Load the last-known update time of each indexed package.
		pkgUTs, err := loadPackageUpdateTimes(
			sophie.LocalFsPath(configs.DocsDBPath().S()))
		if err != nil {
			log.Fatalf("loadPackageUpdateTimes failed: %v", err)
		}
		if configs.CrawlGithubUpdate {
			touchByGithubUpdates(pkgUTs)
		}
		if configs.CrawlByGodocApi {
			// Fetch the full package list from the godoc.org API.
			httpClient := gcse.GenHttpClient("")
			pkgs, err := gcse.FetchAllPackagesInGodoc(httpClient)
			if err != nil {
				log.Fatalf("FetchAllPackagesInGodoc failed: %v", err)
			}
			gcse.AddBiValueAndProcess(bi.Max, "godoc.doc-count", len(pkgs))
			log.Printf("FetchAllPackagesInGodoc returns %d entries", len(pkgs))
			now := time.Now()
			for _, pkg := range pkgs {
				// Register the package with the crawler DB; the callback
				// reports whether it already appears in the docs DB.
				cDB.AppendPackage(pkg, func(pkg string) bool {
					_, ok := pkgUTs[pkg]
					return ok
				})
				// Record a history event for the package.
				site, path := utils.SplitPackage(pkg)
				if err := store.AppendPackageEvent(site, path, "godoc", now,
					sppb.HistoryEvent_Action_None); err != nil {
					log.Printf("UpdatePackageHistory %s %s failed: %v", site, path, err)
				}
			}
		}
		syncDatabases()
	}

	log.Printf("Package DB: %d entries", cDB.PackageDB.Count())
	log.Printf("Person DB: %d entries", cDB.PersonDB.Count())

	pathToCrawl := configs.DataRoot.Join(configs.FnToCrawl)

	// Emit the package crawl list, partitioned by host.
	kvPackage := kv.DirOutput(sophie.LocalFsPath(
		pathToCrawl.Join(configs.FnPackage).S()))
	kvPackage.Clean()
	if err := generateCrawlEntries(cDB.PackageDB, gcse.HostOfPackage, kvPackage); err != nil {
		log.Fatalf("generateCrawlEntries %v failed: %v", kvPackage.Path, err)
	}

	// Emit the person crawl list, partitioned by site.
	kvPerson := kv.DirOutput(sophie.LocalFsPath(
		pathToCrawl.Join(configs.FnPerson).S()))
	kvPerson.Clean()
	if err := generateCrawlEntries(cDB.PersonDB, func(id string) string {
		site, _ := gcse.ParsePersonId(id)
		return site
	}, kvPerson); err != nil {
		log.Fatalf("generateCrawlEntries %v failed: %v", kvPerson.Path, err)
	}
}
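// main also relies on loadPackageUpdateTimes, touchByGithubUpdates,
// syncDatabases, and generateCrawlEntries, which are defined elsewhere in
// this package. Only the first is sketched below, as a minimal, hypothetical
// reconstruction: it assumes the docs DB is a sophie kv directory keyed by
// import path with gcse.DocInfo values whose LastUpdated field records the
// last crawl time. The real helper may differ in details.
func loadPackageUpdateTimes(fpDocs sophie.FsPath) (map[string]time.Time, error) {
	dir := kv.DirInput(fpDocs)
	cnt, err := dir.PartCount()
	if err != nil {
		return nil, err
	}
	pkgUTs := make(map[string]time.Time)
	for part := 0; part < cnt; part++ {
		it, err := dir.Iterator(part)
		if err != nil {
			return nil, err
		}
		var pkg sophie.RawString
		var info gcse.DocInfo
		for {
			if err := it.Next(&pkg, &info); err != nil {
				if err == sophie.EOF {
					break
				}
				it.Close()
				return nil, err
			}
			pkgUTs[string(pkg)] = info.LastUpdated
		}
		it.Close()
	}
	return pkgUTs, nil
}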