Example #1
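A client entry point: it parses flags, prints the version, prepares the cache directory, opens the file database, loads credentials (optionally from a TOML file), and starts the hath server, exposing a profiling endpoint on localhost:6060.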
func main() {
	flag.Parse()
	fmt.Println("Hentai@Home", version)

	if err := createDirIfNotExists(); err != nil {
		log.Fatalln("hath: error while checking directory:", dir, err)
	}

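	// Open the cache frontend and the file metadata database inside the data directory.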
	frontend := hath.NewFrontend(dir)
	db, err := hath.NewDB(path.Join(dir, "hath.db"))
	if err != nil {
		log.Fatal(err)
	}
	filesInDB := db.Count()
	log.Println("hath:", "files in database:", filesInDB)

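	// Credentials default to clientID and clientKey and can be overridden from a TOML file.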
	credentials := hath.Credentials{ClientID: clientID, Key: clientKey}
	cfg := hath.ServerConfig{}
	if len(credentialsPath) != 0 {
		f, err := os.Open(credentialsPath)
		if err != nil {
			log.Fatal(err)
		}
		_, err = toml.DecodeReader(f, &credentials)
		if err != nil {
			log.Fatal(err)
		}
		log.Println("hath: credentials loaded from", credentialsPath)
	}
	cfg.Credentials = credentials
	cfg.Frontend = frontend
	cfg.DataBase = db
	if debug {
		cfg.DontCheckTimestamps = true
		cfg.DontCheckSHA1 = true
		cfg.Debug = true
	} else {
		gin.SetMode(gin.ReleaseMode)
	}

	log.Println("hath:", "starting")
	s := hath.NewServer(cfg)

	// profiling endpoint
	// if debug {
	go func() {
		log.Println(http.ListenAndServe("localhost:6060", nil))
	}()
	// }

	// populating database from disk
	if filesInDB == 0 || scan {
		log.Println("server:", "database is empty; trying to scan files in cache")
		if err := s.PopulateFromFrontend(); err != nil {
			log.Fatalln("server:", "failed to scan files and add them to db:", err)
		}
		log.Println("server:", "cache scanned")
	}

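	// Register cleanup so the server is closed when the process is told to stop.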
	closer.Bind(func() {
		s.Close()
	})

	// starting server; Listen blocks, so run it in a goroutine and wait
	// for a shutdown signal via closer.Hold
	if err := s.Start(); err != nil {
		log.Fatal(err)
	}
	go func() {
		if err := s.Listen(); err != nil {
			log.Fatal(err)
		}
	}()
	closer.Hold()
}
Example #2
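A benchmark tool for the hath file store: depending on the flags it generates fake files in memory, dumps and re-reads them from disk, keeps only their hashes, scans an existing database for old files, or inserts generated files into the database in bulk, optionally writing a CPU profile.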
func main() {
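	// Expose an HTTP endpoint on localhost:6060 for runtime profiling.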
	go func() {
		log.Println(http.ListenAndServe("localhost:6060", nil))
	}()
	flag.Parse()
	runtime.GOMAXPROCS(cpus)
	if *cpuprofile != "" {
		f, err := os.Create(*cpuprofile)
		if err != nil {
			log.Fatal(err)
		}
		if err := pprof.StartCPUProfile(f); err != nil {
			log.Fatal(err)
		}
		defer pprof.StopCPUProfile()
	}
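	// Generator of fake files shared by all the benchmark modes below.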
	g := hath.FileGenerator{
		SizeMax:       sizeMax,
		SizeMin:       sizeMin,
		ResolutionMax: resMax,
		ResolutionMin: resMin,
		TimeDelta:     20,
	}
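	// onlyMemory: generate files in memory, iterate over them, dump them to disk and read the dump back, timing each phase.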
	if onlyMemory {
		log.Println("allocating")
		var files = make([]hath.File, count)
		log.Println("generating")
		for i := range files {
			files[i] = g.NewFake()
		}
		log.Println("generated", len(files))
		fmt.Scanln()
		start := time.Now()
		log.Println("iterating...")
		var count int

		fileBytes := len(g.NewFake().Bytes())
		for _, f := range files {
			if f.Static {
				count++
			}
		}
		end := time.Now()
		duration := end.Sub(start)
		rate := float64(len(files)) / duration.Seconds()
		fmt.Println("static", count)
		fmt.Printf("OK for %v at rate %f per second\n", duration, rate)
		log.Println("iterated")

		log.Println("Writing dump")
		start = time.Now()
		d, err := os.Create("dump.mem")
		if err != nil {
			log.Fatal(err)
		}
		var totalSize int
		for _, f := range files {
			n, err := d.Write(f.Bytes())
			if err != nil {
				log.Fatal(err)
			}
			totalSize += n
		}
		d.Close()
		end = time.Now()
		duration = end.Sub(start)
		rate = float64(len(files)) / duration.Seconds()
		fmt.Println("static", count)
		fmt.Printf("OK for %v at rate %f per second\n", duration, rate)
		log.Println("written", totalSize)
		log.Println("reading dump")
		d, err = os.Open("dump.mem")
		if err != nil {
			log.Fatal(err)
		}
		defer d.Close()
		var buffer = make([]byte, fileBytes)
		var f hath.File
		start = time.Now()
		for {
			// read fixed-size records; ReadFull fails on short reads
			_, err := io.ReadFull(d, buffer)
			if err == io.EOF {
				break
			}
			if err != nil {
				log.Fatal(err)
			}
			if err = hath.UnmarshalFileTo(buffer, &f); err != nil {
				log.Fatal(err)
			}
		}
		end = time.Now()
		duration = end.Sub(start)
		rate = float64(len(files)) / duration.Seconds()
		fmt.Println("static", count)
		fmt.Printf("OK for %v at rate %f per second\n", duration, rate)
		log.Println("read")
		os.Exit(0)
	}
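	// onlyArray: allocate an array holding only the file hashes.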
	if onlyArray {
		log.Println("allocating")
		var ids = make([][hath.HashSize]byte, count)
		log.Println("allocated")
		var f hath.File
		for i := range ids {
			f = g.NewFake()
			ids[i] = f.Hash
		}

	}
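	// Open the file database used by the remaining modes.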
	db, err := hath.NewDB(dbpath)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	d, err := g.NewFake().Marshal()
	if err != nil {
		log.Fatal(err)
	}
	log.Println("average file info is", len(d), "bytes")
	log.Printf("%x", d)
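	// onlyOpen: time a scan of the existing database for files older than 10 seconds.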
	if onlyOpen {
		log.Println("only open; press Enter to scan for files older than 10s")
		count = int64(db.Count())
		fmt.Scanln()
		start := time.Now()
		n, err := db.GetOldFilesCount(time.Now().Add(time.Second * -10))
		if err != nil {
			log.Fatal(err)
		}
		log.Println("got", n)
		end := time.Now()
		duration := end.Sub(start)
		rate := float64(count) / duration.Seconds()
		log.Printf("Scanned %d for %v at rate %f per second\n", count, duration, rate)
		fmt.Scanln()
	}
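	// generate: insert count fake files into the database, in chunks of bulkSize when count is large.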
	if generate {
		log.Println("generating", count, "files")

		fmt.Printf("%+v\n", g)
		var i int64
		files := make([]hath.File, count)
		for i = 0; i < count; i++ {
			files[i] = g.NewFake()
		}
		start := time.Now()
		if count < bulkSize {
			log.Println("writing")
			if err := db.AddBatch(files); err != nil {
				log.Fatal(err)
			}
		} else {
			log.Println("writing in bulks")
			for i = 0; i+bulkSize < count; i += bulkSize {
				bulkstart := time.Now()

				if err := db.AddBatch(files[i : i+bulkSize]); err != nil {
					log.Fatal(err)
				}

				log.Println("from", i, "to", i+bulkSize, time.Since(bulkstart))
			}
			log.Println("from", i, "to", count)
			if err := db.AddBatch(files[i:]); err != nil {
				log.Fatal(err)
			}
		}
		end := time.Now()
		duration := end.Sub(start)
		rate := float64(count) / duration.Seconds()
		fmt.Printf("OK for %v at rate %f per second\n", duration, rate)
	}
	// if collect {
	// 	log.Println("collecting")
	// 	start := time.Now()
	// 	n, err := db.Collect(time.Now().Add(-time.Second))
	// 	if err != nil {
	// 		log.Fatal(err)
	// 	}
	//
	// 	end := time.Now()
	// 	duration := end.Sub(start)
	// 	rate := float64(n) / duration.Seconds()
	// 	fmt.Printf("Removed %d for %v at rate %f per second\n", n, duration, rate)
	// }
	log.Println(count, "is roughly", bytefmt.ByteSize(hath.GetRoughCacheSize(count)))
	log.Println("OK")
}