// stage2HashLoop drains a.stage2HashChan, hashing each file on the priority
// goroutine pool and forwarding successfully hashed items to stage 3 (lookup).
// When the loop exits it closes a.stage3LookupChan so the next stage can
// terminate, and waits for all in-flight hashing goroutines via pool.Wait().
func (a *archiver) stage2HashLoop() {
	defer close(a.stage3LookupChan)
	pool := common.NewGoroutinePriorityPool(a.maxConcurrentContains, a.canceler)
	defer func() {
		// Wait for outstanding hash goroutines; the error (cancellation
		// reason) is deliberately ignored — it is reported elsewhere.
		_ = pool.Wait()
	}()
	for file := range a.stage2HashChan {
		// This loop will implicitly buffer when stage1 is too fast by creating a
		// lot of hung goroutines in pool. This permits reducing the contention on
		// a.closeLock.
		// TODO(tandrii): Implement backpressure in GoroutinePool, e.g. when it
		// exceeds 20k or something similar.
		item := file
		pool.Schedule(item.priority, func() {
			// calcDigest calls setErr() and update wgHashed even on failure.
			end := tracer.Span(a, "hash", tracer.Args{"name": item.DisplayName()})
			if err := item.calcDigest(); err != nil {
				// Hashing failed: cancel the whole archiver run and release
				// the item. setErr/wgHashed were already handled by calcDigest.
				end(tracer.Args{"err": err})
				a.Cancel(err)
				item.Close()
				return
			}
			end(tracer.Args{"size": float64(item.digestItem.Size)})
			// Book-keeping: bytes hashed, per-group progress, then hand the
			// item off to the lookup stage.
			tracer.CounterAdd(a, "bytesHashed", float64(item.digestItem.Size))
			a.progress.Update(groupHash, groupHashDone, 1)
			a.progress.Update(groupHash, groupHashDoneSize, item.digestItem.Size)
			a.progress.Update(groupLookup, groupLookupTodo, 1)
			a.stage3LookupChan <- item
		}, func() {
			// onCanceled callback: mark the item failed with the pool's
			// cancellation reason and release it without hashing.
			item.setErr(a.CancelationReason())
			item.wgHashed.Done()
			item.Close()
		})
	}
}
func (a *archiver) push(item *archiverItem) Future { if a.pushLocked(item) { tracer.Instant(a, "itemAdded", tracer.Thread, tracer.Args{"item": item.DisplayName()}) tracer.CounterAdd(a, "itemsAdded", 1) a.progress.Update(groupFound, groupFoundFound, 1) return item } item.Close() return nil }
func (i *isolateServer) doPush(state *PushState, src io.ReadSeeker) (err error) { useDB := state.status.GSUploadURL == "" end := tracer.Span(i, "push", tracer.Args{"useDB": useDB, "size": state.size}) defer func() { end(tracer.Args{"err": err}) }() if useDB { err = i.doPushDB(state, src) } else { err = i.doPushGCS(state, src) } if err != nil { tracer.CounterAdd(i, "bytesUploaded", float64(state.size)) } return err }
func (i *archiverItem) Close() error { tracer.CounterAdd(i.a, "itemsProcessing", -1) i.a = nil return nil }
func newArchiverItem(a *archiver, displayName, path string, src io.ReadSeeker, priority int64) *archiverItem { tracer.CounterAdd(a, "itemsProcessing", 1) i := &archiverItem{a: a, displayName: displayName, path: path, src: src, priority: priority} i.wgHashed.Add(1) return i }