func exportBuilds(db *db.DB, cols []string) {
	oneDayAgo := time.Now().UTC().Add(-24 * time.Hour)
	sevenDaysAgo := time.Now().UTC().Add(-7 * 24 * time.Hour)

	for _, col := range cols {
		d, err := util.ParseBuildTime(col)
		if err != nil {
			continue
		}

		// Skip collections less than a day old: they are still in progress.
		if d.UTC().After(oneDayAgo) {
			continue
		}

		// Export the collection, compress the dump, and upload the archive.
		outfile, err := exportC(col)
		if err != nil {
			log.Println(err)
			continue
		}

		outzip, err := archiveFile(outfile)
		if err != nil {
			log.Println(err)
			continue
		}

		err = uploadZipfile(outzip)
		if err != nil {
			log.Println(err)
			continue
		}

		// Only drop collections that are at least seven days old.
		if d.UTC().Before(sevenDaysAgo) {
			err = dropC(db, col)
			if err != nil {
				log.Println(err)
				continue
			}
		}
	}
}
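As a rough illustration of the retention windows above, here is a minimal, self-contained sketch using only the standard library. The `classify` helper and the timestamp layout `2006-01-02_15-04-05` are assumptions for illustration; the actual format accepted by `util.ParseBuildTime` is not shown in this section.

```go
package main

import (
	"fmt"
	"time"
)

// classify mirrors the windows used in exportBuilds: collections newer than
// one day are still being written, collections older than one day are safe
// to export, and collections older than seven days may also be dropped.
func classify(col string, now time.Time) (export, drop bool, err error) {
	const layout = "2006-01-02_15-04-05" // hypothetical build-time layout
	d, err := time.Parse(layout, col)
	if err != nil {
		return false, false, err
	}
	oneDayAgo := now.Add(-24 * time.Hour)
	sevenDaysAgo := now.Add(-7 * 24 * time.Hour)
	return d.Before(oneDayAgo), d.Before(sevenDaysAgo), nil
}

func main() {
	now := time.Date(2014, 7, 10, 12, 0, 0, 0, time.UTC)
	for _, col := range []string{"2014-07-10_08-00-00", "2014-07-08_08-00-00", "2014-07-01_08-00-00"} {
		export, drop, _ := classify(col, now)
		fmt.Printf("%s export=%v drop=%v\n", col, export, drop)
	}
}
```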
func (s3FS *S3) List(destPath string) (files []File, err error) {
	destPath = strings.TrimPrefix(destPath, "/")
	destPath = destPath + "/"

	results, err := s3FS.Bucket.List(destPath, "/", "", 1000)
	if err != nil {
		return nil, err
	}

	for _, c := range results.Contents {
		name := strings.TrimPrefix(c.Key, "builds/")
		if name == "" {
			continue
		}

		// Strip the file extension before parsing the build timestamp.
		nameToParse := strings.TrimSuffix(name, filepath.Ext(name))
		t, err := util.ParseBuildTime(nameToParse)
		if err != nil {
			continue
		}

		uri := fmt.Sprintf("https://s3.amazonaws.com/travisarchive/%s", c.Key)
		file := File{Name: name, Time: t, URI: uri}
		files = append(files, file)
	}

	return
}
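A quick, self-contained illustration of the key-name handling in `List`, using a hypothetical object key; the real key layout depends on how the uploaded archives are named, which is not shown here.

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	// Hypothetical object key: the "builds/" prefix and the file extension
	// are stripped before the remainder is parsed as a build timestamp.
	key := "builds/2014-07-08_08-00-00.zip"

	name := strings.TrimPrefix(key, "builds/")
	nameToParse := strings.TrimSuffix(name, filepath.Ext(name))
	uri := fmt.Sprintf("https://s3.amazonaws.com/travisarchive/%s", key)

	fmt.Println(name)        // 2014-07-08_08-00-00.zip
	fmt.Println(nameToParse) // 2014-07-08_08-00-00
	fmt.Println(uri)
}
```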