func main() {
    flag.Parse()
    logging.InitFromFlags()

    // Initialise bot state
    bot.Init()

    // Connect to mongo
    db.Init()
    defer db.Close()

    // Add drivers
    calcdriver.Init()
    decisiondriver.Init()
    factdriver.Init()
    karmadriver.Init()
    markovdriver.Init()
    quotedriver.Init()
    reminddriver.Init()
    seendriver.Init()
    urldriver.Init()

    // Start up the HTTP server
    go http.ListenAndServe(*httpPort, nil)

    // Connect the bot to IRC and wait; reconnects are handled automatically.
    // If we get true back from the bot, re-exec the (rebuilt) binary.
    if bot.Connect() {
        // Calling syscall.Exec probably means deferred functions won't get
        // called, so disconnect from mongodb first for politeness' sake.
        db.Close()
        // If sp0rkle was run from PATH, we need to do that lookup manually.
        fq, _ := exec.LookPath(os.Args[0])
        logging.Warn("Re-executing sp0rkle with args '%v'.", os.Args)
        err := syscall.Exec(fq, os.Args, os.Environ())
        if err != nil {
            // hmmmmmm
            logging.Fatal("Couldn't re-exec sp0rkle: %v", err)
        }
    }
    logging.Info("Shutting down cleanly.")
}
func main() {
    flag.Parse()
    logging.InitFromFlags()

    // Let's go find some mongo.
    db.Init()
    defer db.Close()
    qc := quotes.Init()

    // A communication channel of Quotes.
    quotes := make(chan *quotes.Quote)
    rows := make(chan []interface{})

    // Function to feed rows into the rows channel.
    row_feeder := func(sth *sqlite3.Statement, row ...interface{}) {
        rows <- row
    }

    // Function to execute a query on the SQLite db.
    db_query := func(dbh *sqlite3.Database) {
        n, err := dbh.Execute("SELECT * FROM Quotes;", row_feeder)
        if err == nil {
            logging.Info("Read %d rows from database.\n", n)
        } else {
            logging.Error("DB error: %s\n", err)
        }
    }

    // Open up the quote database in a goroutine and feed rows
    // in on the input_rows channel.
    go func() {
        sqlite3.Session(*file, db_query)
        // once we've done the query, close the channel to indicate this
        close(rows)
    }()

    // Another goroutine to munge the rows into quotes.
    // This was originally done inside the SQLite callbacks, but
    // cgo or sqlite3 obscures runtime panics and makes fail happen.
    go func() {
        for row := range rows {
            parseQuote(row, quotes)
        }
        close(quotes)
    }()

    // And finally...
    count := 0
    var err error
    for quote := range quotes {
        // ... push each quote into mongo
        err = qc.Insert(quote)
        if err != nil {
            logging.Error("Awww: %v\n", err)
        } else {
            if count%1000 == 0 {
                fmt.Printf("%d...", count)
            }
            count++
        }
    }
    fmt.Println("done.")
    logging.Info("Inserted %d quotes.\n", count)
}
func main() {
    flag.Parse()
    logging.InitFromFlags()

    // Let's go find some mongo.
    db.Init()
    defer db.Close()
    fc := factoids.Init()

    // A communication channel of Factoids.
    facts := make(chan *factoids.Factoid)
    ptrs := make(chan []interface{})
    rows := make(chan []interface{})

    // Function to execute some queries on the SQLite db and shove the results
    // into the ptrs and rows channels created above.
    db_query := func(dbh *sqlite3.Database) {
        _, err := dbh.Execute("SELECT * FROM Factoids WHERE Value LIKE '%*%';", feeder(ptrs))
        close(ptrs)
        if err != nil {
            logging.Error("DB error: %s", err)
        }
        n, err := dbh.Execute("SELECT * FROM Factoids;", feeder(rows))
        close(rows)
        if err == nil {
            logging.Info("Read %d rows from database.", n)
        } else {
            logging.Error("DB error: %s", err)
        }
    }
    go func() {
        sqlite3.Session(*file, db_query)
    }()

    // First, synchronously read all the stuff from the ptrs channel
    // and build a set of all the factoid keys that are used as pointers.
    for row := range ptrs {
        for _, val := range parseMultipleValues(toString(row[cValue])) {
            if key, _, _ := util.FactPointer(val); key != "" {
                ptrkeys[key] = true
            }
        }
    }

    // Now run another goroutine to munge the rows into factoids.
    // This was originally done inside the SQLite callbacks, but
    // cgo or sqlite3 obscures runtime panics and makes fail happen.
    go func() {
        for row := range rows {
            parseFactoid(row, facts)
        }
        close(facts)
    }()

    // And finally...
    count := 0
    var err error
    for fact := range facts {
        // ... push each fact into mongo
        err = fc.Insert(fact)
        if err != nil {
            logging.Error("Awww: %v\n", err)
        } else {
            if count%1000 == 0 {
                fmt.Printf("%d...", count)
            }
            count++
        }
    }
    fmt.Println("done.")
    logging.Info("Inserted %d factoids.\n", count)
}
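// Note: feeder is not defined in this snippet. Below is a minimal sketch of
// what it could look like, assuming it only adapts a destination channel to
// the callback signature accepted by sqlite3.Execute, mirroring the inline
// row_feeder closures used by the quote and url importers. The actual helper
// in the repository may differ.
func feeder(out chan []interface{}) func(*sqlite3.Statement, ...interface{}) {
    return func(sth *sqlite3.Statement, row ...interface{}) {
        // Forward each result row to the channel so it can be parsed by a
        // consumer goroutine, outside the cgo callback.
        out <- row
    }
}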
func main() {
    flag.Parse()
    logging.InitFromFlags()
    golog.Init()

    // Slightly more random than 1.
    rand.Seed(time.Now().UnixNano() * int64(os.Getpid()))

    // Initialise bot state
    bot.Init()

    // Connect to mongo
    db.Init()
    defer db.Close()

    // Add drivers
    calcdriver.Init()
    decisiondriver.Init()
    factdriver.Init()
    karmadriver.Init()
    markovdriver.Init()
    netdriver.Init()
    quotedriver.Init()
    reminddriver.Init()
    seendriver.Init()
    statsdriver.Init()
    urldriver.Init()

    // Start up the HTTP server
    go http.ListenAndServe(*httpPort, nil)

    // Set up a signal handler to shut things down gracefully.
    // NOTE: net/http doesn't provide for graceful shutdown :-/
    go func() {
        called := new(int32)
        sigint := make(chan os.Signal, 1)
        signal.Notify(sigint, syscall.SIGINT)
        for _ = range sigint {
            if atomic.AddInt32(called, 1) > 1 {
                logging.Fatal("Received multiple interrupts, dying.")
            }
            bot.Shutdown()
        }
    }()

    // Connect the bot to IRC and wait; reconnects are handled automatically.
    // If we get true back from the bot, re-exec the (rebuilt) binary.
    if <-bot.Connect() {
        // Calling syscall.Exec probably means deferred functions won't get
        // called, so disconnect from mongodb first for politeness' sake.
        db.Close()
        // If sp0rkle was run from PATH, we need to do that lookup manually.
        fq, _ := exec.LookPath(os.Args[0])
        logging.Warn("Re-executing sp0rkle with args '%v'.", os.Args)
        err := syscall.Exec(fq, os.Args, os.Environ())
        if err != nil {
            // hmmmmmm
            logging.Fatal("Couldn't re-exec sp0rkle: %v", err)
        }
    }
    logging.Info("Shutting down cleanly.")
}
func main() {
    flag.Parse()
    logging.InitFromFlags()

    // Let's go find some mongo.
    db.Init()
    defer db.Close()
    uc := urls.Init()

    work := make(chan *urls.Url)
    quit := make(chan bool)
    urls := make(chan *urls.Url)
    rows := make(chan []interface{})
    failed := 0

    // If we're checking, spin up some workers
    if *check {
        for i := 1; i <= *workq; i++ {
            go func(n int) {
                count := 0
                for u := range work {
                    count++
                    logging.Debug("w%02d r%04d: Fetching '%s'", n, count, u.Url)
                    res, err := http.Head(u.Url)
                    if err == nil {
                        // Only inspect the response when the HEAD request succeeded.
                        logging.Debug("w%02d r%04d: Response '%s'", n, count, res.Status)
                    }
                    if err == nil && res.StatusCode == 200 {
                        urls <- u
                    } else {
                        failed++
                    }
                }
                quit <- true
            }(i)
        }
    }

    // Function to feed rows into the rows channel.
    row_feeder := func(sth *sqlite3.Statement, row ...interface{}) {
        rows <- row
    }

    // Function to execute a query on the SQLite db.
    db_query := func(dbh *sqlite3.Database) {
        n, err := dbh.Execute("SELECT * FROM urls;", row_feeder)
        if err == nil {
            logging.Info("Read %d rows from database.\n", n)
        } else {
            logging.Error("DB error: %s\n", err)
        }
    }

    // Open up the URL database in a goroutine and feed rows
    // in on the input_rows channel.
    go func() {
        sqlite3.Session(*file, db_query)
        // once we've done the query, close the channel to indicate this
        close(rows)
    }()

    // Another goroutine to munge the rows into Urls and optionally send
    // them to the pool of checker goroutines.
    go func() {
        for row := range rows {
            u := parseUrl(row)
            if *check {
                work <- u
            } else {
                urls <- u
            }
        }
        if *check {
            // Close work channel and wait for all workers to quit.
            close(work)
            for i := 0; i < *workq; i++ {
                <-quit
            }
        }
        close(urls)
    }()

    // And finally...
    count := 0
    var err error
    for u := range urls {
        // ... push each url into mongo
        err = uc.Insert(u)
        if err != nil {
            logging.Error("Awww: %v\n", err)
        } else {
            if count%1000 == 0 {
                fmt.Printf("%d...", count)
            }
            count++
        }
    }
    fmt.Println("done.")
    if *check {
        logging.Info("Dropped %d non-200 urls.", failed)
    }
    logging.Info("Inserted %d urls.", count)
}