// BenchmarkChannelCacheUniqueDocs_Unordered adds b.N unique documents to the
// channel cache with their sequence numbers shuffled, so entries arrive out
// of order.
func BenchmarkChannelCacheUniqueDocs_Unordered(b *testing.B) {
	base.SetLogLevel(2) // disables logging
	context, _ := NewDatabaseContext("db", testBucket(), false, CacheOptions{})
	cache := newChannelCache(context, "Benchmark", 0)

	// Generate docs with unique doc IDs and sequential sequence numbers.
	docs := make([]*LogEntry, b.N)
	r := rand.New(rand.NewSource(99))
	for i := 0; i < b.N; i++ {
		docs[i] = e(uint64(i), fmt.Sprintf("long_document_id_for_sufficient_equals_complexity_%012d", i), "1-a")
	}

	// Shuffle the sequence numbers so they are added out of order.
	for i := b.N - 1; i >= 0; i-- {
		j := int(r.Float64() * float64(b.N))
		oldSeq := docs[i].Sequence
		docs[i].Sequence = docs[j].Sequence
		docs[j].Sequence = oldSeq
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		cache.addToCache(docs[i], false)
	}
}
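// NOTE: e() is a test helper defined elsewhere in this file; a minimal sketch
// of what it presumably does, assuming LogEntry exposes Sequence, DocID and
// RevID fields (the exact field set is an assumption, shown only so the
// benchmarks can be read in isolation):
//
//	func e(seq uint64, docID string, revID string) *LogEntry {
//		return &LogEntry{
//			Sequence: seq,
//			DocID:    docID,
//			RevID:    revID,
//		}
//	}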
// BenchmarkChannelCacheRepeatedDocs80 adds b.N entries whose doc IDs and
// revisions come from generateDocs(80.0, b.N), so some documents receive
// repeated updates.
func BenchmarkChannelCacheRepeatedDocs80(b *testing.B) {
	base.SetLogLevel(2) // disables logging
	context, _ := NewDatabaseContext("db", testBucket(), false, CacheOptions{})
	cache := newChannelCache(context, "Benchmark", 0)

	// Generate doc IDs and revision strings.
	docIDs, revStrings := generateDocs(80.0, b.N)

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		cache.addToCache(e(uint64(i), docIDs[i], revStrings[i]), false)
	}
}
// BenchmarkChannelCacheRepeatedDocs5 is the same benchmark as above, but with
// doc IDs and revisions generated by generateDocs(5.0, b.N), so documents are
// repeated far more often.
func BenchmarkChannelCacheRepeatedDocs5(b *testing.B) {
	base.SetLogLevel(2) // disables logging
	context := testBucketContext()
	cache := newChannelCache(context, "Benchmark", 0)

	// Generate doc IDs and revision strings.
	docIDs, revStrings := generateDocs(5.0, b.N)

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		cache.addToCache(e(uint64(i), docIDs[i], revStrings[i]), false)
	}
}
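// NOTE: generateDocs() is a test helper defined elsewhere in this file. It
// returns n doc IDs and revision strings in which roughly the given
// percentage of entries map to unique documents and the remainder are
// repeated updates to those documents. A minimal sketch under that
// assumption (the revision format and exact distribution are illustrative,
// not the actual helper):
//
//	func generateDocs(percentUnique float64, n int) ([]string, []string) {
//		docIDs := make([]string, n)
//		revStrings := make([]string, n)
//		revCounts := make(map[int]int)
//		r := rand.New(rand.NewSource(99))
//		uniqueDocs := percentUnique / 100 * float64(n)
//		for i := 0; i < n; i++ {
//			docIndex := int(r.Float64() * uniqueDocs)
//			docIDs[i] = fmt.Sprintf("long_document_id_for_sufficient_equals_complexity_%012d", docIndex)
//			revCounts[docIndex]++
//			revStrings[i] = fmt.Sprintf("%d-x", revCounts[docIndex])
//		}
//		return docIDs, revStrings
//	}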
// BenchmarkDatabase creates a bucket and database per iteration and writes a
// single document to it.
func BenchmarkDatabase(b *testing.B) {
	base.SetLogLevel(2) // disables logging
	for i := 0; i < b.N; i++ {
		bucket, _ := ConnectToBucket(base.BucketSpec{
			Server:     kTestURL,
			BucketName: fmt.Sprintf("b-%d", i)}, nil)
		context, _ := NewDatabaseContext("db", bucket, false, DatabaseContextOptions{})
		db, _ := CreateDatabase(context)

		body := Body{"key1": "value1", "key2": 1234}
		db.Put(fmt.Sprintf("doc%d", i), body)

		db.Close()
	}
}
// BenchmarkChannelCacheUniqueDocs_Ordered adds b.N unique documents to the
// channel cache in sequence order.
func BenchmarkChannelCacheUniqueDocs_Ordered(b *testing.B) {
	base.SetLogLevel(2) // disables logging
	context, _ := NewDatabaseContext("db", testBucket(), false, CacheOptions{})
	cache := newChannelCache(context, "Benchmark", 0)

	// Generate doc IDs.
	docIDs := make([]string, b.N)
	for i := 0; i < b.N; i++ {
		docIDs[i] = fmt.Sprintf("long_document_id_for_sufficient_equals_complexity_%012d", i)
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		cache.addToCache(e(uint64(i), docIDs[i], "1-a"), false)
	}
}
// handleSetLogging handles requests that change the log level and/or the set
// of enabled log keys.
func (h *handler) handleSetLogging() error {
	body, err := h.readBody()
	if err != nil {
		return nil
	}
	// Apply the "level" query parameter, if present.
	if h.getQuery("level") != "" {
		base.SetLogLevel(int(getRestrictedIntQuery(h.rq.URL.Query(), "level", uint64(base.LogLevel()), 1, 3, false)))
		if len(body) == 0 {
			return nil // empty body is OK if request is just setting the log level
		}
	}
	// Any non-empty body must be a JSON map of log key names to booleans.
	var keys map[string]bool
	if err := json.Unmarshal(body, &keys); err != nil {
		return base.HTTPErrorf(http.StatusBadRequest, "Invalid JSON or non-boolean values")
	}
	// A PUT replaces the current set of log keys; other methods merge into it.
	base.UpdateLogKeys(keys, h.rq.Method == "PUT")
	return nil
}
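// Example client call, assuming this handler is registered on the admin REST
// API at /_logging (the path and port below are assumptions, not taken from
// this file):
//
//	body := bytes.NewBufferString(`{"HTTP": true, "Changes": true}`)
//	req, _ := http.NewRequest("PUT", "http://localhost:4985/_logging?level=3", body)
//	resp, err := http.DefaultClient.Do(req)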