//download image from town func (t *Townparser) downloadImage(url string, name string) error { defer func() { if err := recover(); err != nil { log.Info("%s recovered from panic", TAG) return } }() if url == "0" { return nil } exist, err := exists("templates/static/images/" + name + ".jpg") if err != nil { log.Error("%s %s", TAG, err.Error()) } if !exist { resp, err := t.Tc.Get(url) if err != nil { log.Error("%s image download failed, name: %v, url: %v", TAG, name, url) log.Error("%s %s", TAG, err.Error()) return err } defer resp.Body.Close() if strings.Contains(url, "jpg") || strings.Contains(url, "jpeg") { img, _ := jpeg.Decode(resp.Body) m := resize.Resize(300, 0, img, resize.Lanczos2Lut) out, err := os.Create("templates/static/images/" + name + ".jpg") if err != nil { log.Error("%s %s", TAG, err.Error()) return nil } // write new image to file jpeg.Encode(out, m, nil) out.Close() } else if strings.Contains(url, "png") { img, err := png.Decode(resp.Body) if err != nil { log.Error("%s %s", TAG, err.Error()) } m := resize.Resize(300, 0, img, resize.Lanczos2Lut) out, err := os.Create("templates/static/images/" + name + ".png") if err != nil { log.Error("%s %s", TAG, err.Error()) return nil } // write new image to file jpeg.Encode(out, m, nil) out.Close() } } time.Sleep(200 * time.Millisecond) return nil }
//run all the mappers in parallel func (mr *MapReduceJob) runMappers() { mappersGroup := sync.WaitGroup{} //open up N mappers in parallel go routines for i := 0; i < mr.numMappers; i++ { //we add each mapper to the waitgroup before starting, then when it finishes it calls Done //this way we can wait for the mappers to finish safely mappersGroup.Add(1) logging.Info("Starting mapper %d", i) go func(i int) { for record := range mr.inputChan { (*mr.mapReducer).Map(record, mr.mappersOutputChan) } mappersGroup.Done() logging.Info("Mapper %d done", i) }(i) } //wait for the mappers to finish up and then close their output channel logging.Info("Waiting for mappers to finish") mappersGroup.Wait() logging.Info("All mappers finished...") close(mr.mappersOutputChan) }
func (t *Townclient) getSValue() (sValue string) { log.Info("%s getting sValue for town login", TAG) sValue = "" var doc *goquery.Document var e error log.Info("%s[GET] url: %v", TAG, ROOT) if doc, e = goquery.NewDocument(ROOT); e != nil { log.Error("%s %s", TAG, e.Error()) return } doc.Find("input").Each(func(i int, s *goquery.Selection) { attr, exists := s.Attr("name") if exists == true { if attr == "s" { bla, exists := s.Attr("value") if exists == true { sValue = bla } } } }) log.Info("%s sValue: %v", TAG, sValue) return sValue }
func (g *Ghostclient) getFirstTimeShit() error { log.Info("%s[GET] url: %v", TAG, LOGIN) log.Info("%s getting cookies", TAG) client := &http.Client{} req, err := http.NewRequest("GET", LOGIN, nil) if err != nil { return err } req.Header.Add("Accept", "text/html, application/xhtml+xml, */*") req.Header.Add("Accept-Language", "de-DE") req.Header.Add("User-Agent", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/6.0)") req.Header.Add("Accept-Encoding", "gzip, deflate") req.Header.Add("Connection", "Keep-Alive") req.Header.Add("Host", "ghost-of-usenet.org") time1 := time.Now() g.dumpRequest(req, "ghost_first_req_"+strconv.Itoa(time1.Nanosecond())) //connect to sUrl resp, err := client.Do(req) if err != nil { return err } g.cookies = resp.Cookies() g.dumpResponse(resp, "ghost_first_resp_"+strconv.Itoa(time1.Nanosecond())) return nil }
func (t *Townmanager) Start() { t.page = 1 t.end = false tc := &Townclient{} tc.User = t.User tc.Password = t.Password err := t.init(tc) log.Info("%s init finished, starting to parse...", TAG) if err != nil { log.Error("%s init failed", TAG) log.Error("%s %s", TAG, err.Error()) return } tp := &Townparser{Url: t.url, Tc: tc} count, err := tp.ParsePageCount() if err != nil { log.Error("%s %s", TAG, err.Error()) return } t.maxpage = count log.Info("%s crawling approximately %v pages", TAG, t.maxpage) t.saveReleases(tp.Rel) i := 1 for { if i == 1 { err = tp.ParseReleases(false) if err != nil { log.Error("%s %s", TAG, err.Error()) break } } else { tp = nil tp = &Townparser{Url: t.url + "&pp=25&page=" + strconv.Itoa(i), Tc: tc} err = tp.ParseReleases(true) if err != nil { log.Error("%s %s", TAG, err.Error()) break } } log.Info("%s crawled page %v/%v", TAG, i, t.maxpage) t.saveReleases(tp.Rel) time.Sleep(5 * time.Second) i++ if i == t.maxpage+1 { break } if t.end { log.Info("%s found old end point", TAG) break } } log.Info("%s parser closing", TAG) }
func GoRuntimeStats() { m := &runtime.MemStats{} log.Info("# goroutines: %v", runtime.NumGoroutine()) runtime.ReadMemStats(m) log.Info("Memory Acquired: %vmb", (m.Sys / 1024 / 1024)) log.Info("Memory Used : %vmb", (m.Alloc / 1024 / 1024)) }
// configure registers the API's routes on a router. If the passed router is nil, we create a new one and return it. // The nil mode is used when an API is run in stand-alone mode. func (a *API) configure(router *httprouter.Router) *httprouter.Router { if router == nil { router = httprouter.New() } for i, route := range a.Routes { if err := route.parseInfo(route.Path); err != nil { logging.Error("Error parsing info for %s: %s", route.Path, err) } a.Routes[i] = route h := a.handler(route) pth := a.FullPath(route.Path) if route.Methods&GET == GET { logging.Info("Registering GET handler %v to path %s", h, pth) router.Handle("GET", pth, h) } if route.Methods&POST == POST { logging.Info("Registering POST handler %v to path %s", h, pth) router.Handle("POST", pth, h) } } chain := buildChain(a.SwaggerMiddleware...) if chain == nil { chain = buildChain(a.swaggerHandler()) } else { chain.append(a.swaggerHandler()) } // Server the API documentation swagger router.GET(a.FullPath("/swagger"), a.middlewareHandler(chain, nil, nil)) chain = buildChain(a.TestMiddleware...) if chain == nil { chain = buildChain(a.testHandler()) } else { chain.append(a.testHandler()) } router.GET(path.Join("/test", a.root(), ":category"), a.middlewareHandler(chain, nil, nil)) // Redirect /$api/$version/console => /console?url=/$api/$version/swagger uiPath := fmt.Sprintf("/console?url=%s", url.QueryEscape(a.FullPath("/swagger"))) router.Handler("GET", a.FullPath("/console"), http.RedirectHandler(uiPath, 301)) return router }
func (r *Runner) Start() { go func() { timeout := time.Second * 1 for { select { case <-time.After(timeout): if !r.checkTown() { timeout = time.Minute * 5 log.Info("town: trying login again in %v minute", timeout) } else { tm := town.Townmanager{User: r.Server.Config2.TownName, Password: r.Server.Config2.TownPassword, DB: r.Server.RelDB} go tm.Start() dur, err := time.ParseDuration(r.Server.Config2.Timeout) if err != nil { log.Error(err.Error()) } c := time.Tick(dur) for _ = range c { log.Info("tick start town") go tm.Start() } } } break } return }() go func() { timeout := time.Second * 2 for { select { case <-time.After(timeout): if !r.checkTown() { timeout = time.Minute * 5 log.Info("ghost: trying to login again in %v minute", timeout) } else { gm := ghost.Ghostmanager{User: r.Server.Config2.GhostName, Password: r.Server.Config2.GhostPassword, DB: r.Server.RelDB} go gm.Start() dur, err := time.ParseDuration(r.Server.Config2.Timeout) if err != nil { log.Error(err.Error()) } c := time.Tick(dur) for _ = range c { log.Info("tick start ghost") go gm.Start() } } } break } return }() }
// Logs into town.ag and returns the response cookies func (g *Ghostclient) Login() error { log.Info("%s login process started", TAG) g.getFirstTimeShit() param := url.Values{} param.Set("url", "index.php") param.Add("send", "send") param.Add("sid", "") param.Add("l_username", g.User) param.Add("l_password", g.Password) param.Add("submit", "Anmelden") client := &http.Client{} req, err := http.NewRequest("POST", LOGIN, strings.NewReader(param.Encode())) if err != nil { return err } log.Info("%s[POST] url: %v", TAG, LOGIN) if g.cookies != nil { for _, cookie := range g.cookies { req.AddCookie(cookie) } } req.Header.Add("Accept", "text/html, application/xhtml+xml, */*") req.Header.Add("Referer", "http://ghost-of-usenet.org/index.php") req.Header.Add("Accept-Language", "de-DE") req.Header.Add("User-Agent", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/6.0)") req.Header.Add("Content-Type", "application/x-www-form-urlencoded") req.Header.Add("Accept-Encoding", "gzip, deflate") req.Header.Add("Host", "ghost-of-usenet.org") length := strconv.Itoa(len(param.Encode())) req.Header.Add("Content-Length", length) req.Header.Add("Connection", "Keep-Alive") req.Header.Add("Pragma", "no-cache") g.dumpRequest(req, "town_login_req") resp, err := client.Do(req) if err != nil { return err } defer resp.Body.Close() g.dumpResponse(resp, "town_login_resp") g.cookies = resp.Cookies() return nil }
func (g *Ghostmanager) Start() { g.end = false gc := &Ghostclient{} gc.User = g.User gc.Password = g.Password err := g.init(gc) log.Info("%s init finished, starting to parse...", TAG) if err != nil { log.Error("%s init failed", TAG) log.Error("%s %s", TAG, err.Error()) return } tp := &Ghostparser{Url: g.url, Gc: gc} i := 1 for { if i == 1 { err = tp.ParseReleases() if err != nil { log.Error("%s %s", TAG, err.Error()) break } g.maxpage = tp.Count log.Info("%s crawling approximately %v pages", TAG, g.maxpage) } else { tp = nil tp = &Ghostparser{Url: g.url + "&page=" + strconv.Itoa(i), Gc: gc} err = tp.ParseReleases() if err != nil { log.Error("%s %s", TAG, err.Error()) break } } g.saveReleases(tp.Rel) log.Info("%s crawled page %v/%v", TAG, i, g.maxpage) time.Sleep(5 * time.Second) i++ if i == g.maxpage+1 { break } if g.end { log.Info("%s found old end point", TAG) break } } log.Info("%s closing", TAG) }
//collect the mappers' output into the aggregate dictionary func (mr *MapReduceJob) collectMappersOutput() { logging.Info("Collecting mappers output") tracker := progressTracker(5, "items") for kv := range mr.mappersOutputChan { mr.aggregate[kv.Key] = append(mr.aggregate[kv.Key], kv.Val) tracker <- 1 } close(tracker) logging.Info("FINISHED Collecting mappers output") }
func (r *Route) parseInfo(path string) error { ri, err := schema.NewRequestInfo(reflect.TypeOf(r.Handler), path, r.Description, r.Returns) if err != nil { return err } // search for custom unmarshallers in the request info for _, param := range ri.Params { if param.Type.Kind() == reflect.Struct { logging.Debug("Checking unmarshaller for %s", param.Type) val := reflect.Zero(param.Type).Interface() if unm, ok := val.(Unmarshaler); ok { logging.Info("Registering unmarshaller for %#v", val) schemaDecoder.RegisterConverter(val, gorilla.Converter(func(s string) reflect.Value { return reflect.ValueOf(unm.UnmarshalRequestData(s)) })) } } } r.requestInfo = ri return nil }
//run all steps func (mr *MapReduceJob) Run() { //read the input in a seaparate goroutine go mr.readInput() //aggregate results in a separate goroutine while the mappers are working go mr.collectMappersOutput() //start mappers and block until they are all finished mr.runMappers() //run the output collector in a different goroutine //now run the reducers. For this to advance, some other gorooutine needs to collect output go mr.runReducers() outputHandler := sync.WaitGroup{} outputHandler.Add(1) go func() { (*mr.mapReducer).HandleOutput(mr.OutputChan) outputHandler.Done() }() outputHandler.Wait() logging.Info("Finished!") }
func ReadConfigs() error { if err := autoflag.Load(gofigure.DefaultLoader, &Config); err != nil { logging.Error("Error loading configs: %v", err) return err } logging.Info("Read configs: %#v", &Config) for k, m := range Config.APIConfigs { if conf, found := Config.apiconfs[k]; found && conf != nil { b, err := yaml.Marshal(m) if err == nil { if err := yaml.Unmarshal(b, conf); err != nil { logging.Error("Error reading config for API %s: %s", k, err) } else { logging.Debug("Unmarshaled API config for %s: %#v", k, conf) } } else { logging.Error("Error marshalling config for API %s: %s", k, err) } } else { logging.Warning("API Section %s in config file not registered with server", k) } } return nil }
//http get to the given ressource func (t *Townclient) Get(sUrl string) (*http.Response, error) { if strings.Contains(sUrl, "jpg") || strings.Contains(sUrl, "png") || strings.Contains(sUrl, "gif") || strings.Contains(sUrl, "jpeg") { } else { log.Info("%s[GET] url: %v", TAG, sUrl) } client := &http.Client{} req, err := http.NewRequest("GET", sUrl, nil) if err != nil { log.Error("%s couldn't create Request to: %v", TAG, sUrl) return nil, err } t.addHeader(req) if t.cookies != nil { for _, cookie := range t.cookies { req.AddCookie(cookie) } } time1 := time.Now() t.dumpRequest(req, "town_get_req_"+strconv.Itoa(time1.Nanosecond())) //connect to sUrl resp, err := client.Do(req) if err != nil { log.Error("%s couldn't connect to: %v", TAG, sUrl) return nil, err } t.dumpResponse(resp, "town_get_resp_"+strconv.Itoa(time1.Nanosecond())) return resp, nil }
// Detect if the request is secure or not, based on either TLS info or http headers/url func (r *Request) parseSecure() { logging.Info("Parsing secure. TLS: %v, URI: %s, Headers: %#v", r.TLS, r.RequestURI, r.Header) if r.TLS != nil { r.Secure = true return } if u, err := url.ParseRequestURI(r.RequestURI); err == nil { if u.Scheme == "https" { r.Secure = true return } } xfp := r.Header.Get("X-Forwarded-Proto") if xfp == "" { xfp = r.Header.Get("X-Scheme") } if xfp == "https" { r.Secure = true } }
func (rv *RequestValidator) Validate(request interface{}, r *http.Request) error { val := reflect.ValueOf(request) if val.Kind() == reflect.Ptr { val = val.Elem() } //go over all the validators for _, v := range rv.fieldValidators { // find the field in the struct. we assume it's there since we build the validators on start time field := val.FieldByName(v.GetKey()) // if the arg is optional and not set, we set the default if v.IsOptional() && (!field.IsValid() || r.FormValue(v.GetParamName()) == "") { def, ok := v.GetDefault() if ok { logging.Info("Default value for %s: %v", v.GetKey(), def) field.Set(reflect.ValueOf(def).Convert(field.Type())) } } // now we validate! e := v.Validate(field, r) if e != nil { logging.Error("Could not validate field %s: %s", v.GetParamName(), e) return e } } return nil }
func (c *Cache) freeMemory() { if c.isRunning { return } c.isRunning = true log.Info("[Cache] freeMemory() cachesize: %vmb, size to be freed: %vmb", c.GetSizeInMb(), (c.sizefree / 1024 / 1024)) low := uint32(1) count := c.GetSize() start := time.Now() sec20 := int64(time.Second * 20) sec5 := int64(time.Second * 5) ignoreimmunity := false b := false for { for key, value := range c.cache { if c.sizemax-c.size < c.sizefree { if value.AccessCount <= low { //5 minute immunity to protect freshly added files if int64(start.Sub(value.Added)) >= sec20 || ignoreimmunity { c.Remove(key) } } } else { b = true break } } low++ if int64(time.Now().Sub(start)) >= sec5 { ignoreimmunity = true } if b { break } } end := time.Now() log.Info("[Cache] removed %v elements in %fsec", count-c.GetSize(), end.Sub(start).Seconds()) start = time.Now() runtime.GC() debug.FreeOSMemory() end = time.Now() log.Info("[Cache] run gc manually to free up memory asap, took %fsec", end.Sub(start).Seconds()) c.isRunning = false //GoRuntimeStats() }
//Read the input from the user's reader and push it to the input chan func (mr *MapReduceJob) readInput() { tracker := progressTracker(2, "docs") logging.Info("Reading input") for { record, err := (*mr.mapReducer).Read() if err != nil { logging.Error("Error: Aborting read: %s", err) break } mr.inputChan <- record tracker <- 1 } close(tracker) close(mr.inputChan) logging.Info("Finished reading input!") }
//return the Daily url or "" if something went wrong func (g *Ghostclient) GetDailyUrl() (string, error) { client := &http.Client{ CheckRedirect: Redirect, } log.Info("%s[GET] url: %v", TAG, DAILY) req, err := http.NewRequest("GET", DAILY, nil) if err != nil { log.Error("%s %s", TAG, err.Error()) return "", err } req.Header.Add("Accept", "text/html, application/xhtml+xml, */*") req.Header.Add("Referer", "http://ghost-of-usenet.org/index.php") req.Header.Add("Accept-Language", "de-DE") req.Header.Add("User-Agent", "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/6.0)") req.Header.Add("Accept-Encoding", "gzip, deflate") req.Header.Add("Host", "ghost-of-usenet.org") req.Header.Add("Connection", "Keep-Alive") if g.cookies != nil { for _, cookie := range g.cookies { req.AddCookie(cookie) } } time1 := time.Now() g.dumpRequest(req, "daily_req_"+strconv.Itoa(time1.Nanosecond())) resp, err := client.Do(req) if resp == nil { return "", err } g.dumpResponse(resp, "daily_resp_"+strconv.Itoa(time1.Nanosecond())) url, err := resp.Location() if err != nil { return "", err } log.Info("%s daily url: %v", TAG, url.String()) return url.String(), nil }
// Logs into town.ag and returns the response cookies func (t *Townclient) Login() error { log.Info("%s login process started", TAG) sValue := t.getSValue() if sValue == "" { return errors.New("couldnt find SValue for the Town login") } param := url.Values{} param.Set("do", "login") param.Add("s", sValue) param.Add("securitytoken", "guest") param.Add("vb_login_username", t.User) param.Add("vb_login_password", "") param.Add("cookieuser", "1") param.Add("vb_login_md5password", t.Password) param.Add("vb_login_md5password_utf", t.Password) param.Add("url", "/v2/") client := &http.Client{} req, err := http.NewRequest("POST", LOGIN, strings.NewReader(param.Encode())) if err != nil { return err } log.Info("%s[POST] url: %v", TAG, LOGIN) t.addHeader(req) t.dumpRequest(req, "town_login_req") resp, err := client.Do(req) if err != nil { return err } defer resp.Body.Close() t.dumpResponse(resp, "town_login_resp") t.cookies = resp.Cookies() return nil }
func main() { runtime.GOMAXPROCS(8) my := MyMapReducer{scanner: *bufio.NewScanner(os.Stdin)} mr := gomr.NewMapReduceJob(&my, 80, 80) st := time.Now() mr.Run() logging.Info("Finished in %s", time.Since(st)) }
func (o *OAuthMiddleware) LoginHandler() vertex.Route { handler := func(w http.ResponseWriter, r *vertex.Request) (interface{}, error) { code := r.FormValue("code") logging.Info("Got code: %s", code) tok, err := o.conf.Exchange(oauth2.NoContext, code) if err != nil { return nil, vertex.UnauthorizedError("Could not log you in: %s", err) } user, err := o.userValidator.Login(tok) if err != nil { return nil, vertex.UnauthorizedError("Could not validate user for login: %s", err) } enc, err := o.userValidator.EncodeToken(user) if err != nil { return nil, vertex.UnauthorizedError("Could not validate encode user token: %s", err) } o.setCookie(w, enc, r.Host) if cook, err := r.Cookie(nextUrl); err == nil && cook != nil && cook.Value != "" { logging.Info("Found nextUrl from before auth denied. Redirecting to %s", cook.Value) http.Redirect(w, r.Request, cook.Value, http.StatusTemporaryRedirect) return nil, vertex.Hijacked } return "Success Logging In", nil } return vertex.Route{ Path: loginPath, Description: "OAuth Login", Handler: vertex.HandlerFunc(handler), Methods: vertex.GET, } }
//parse the http resp from Townclient func (g *Ghostparser) ParseReleases() error { log.Info("%s parsing %v", TAG, g.Url) resp, err := g.Gc.Get(g.Url) if err != nil { log.Error("%s %s", TAG, err.Error()) return err } defer resp.Body.Close() respbody, err := html.Parse(resp.Body) doc := goquery.NewDocumentFromNode(respbody) var rel Release doc.Find("table").Each(func(a int, sa *goquery.Selection) { if a == 10 { //get the right table sa.Find("tr").Each(func(b int, sb *goquery.Selection) { sb.Find("td").Each(func(c int, sc *goquery.Selection) { if c == 2 { rel = Release{} g.getUrlAndTagAndName(&rel, sc) if rel.Name != "" { rel.Time = time.Now().Unix() rel.Checksum = g.encodeName(rel.Url) rel.checkQual() if rel.Name != "" { rel.Hits = 0 rel.Rating = 0 g.downloadImage(rel.Url, rel.Checksum) g.addRelease(rel) } } } }) }) } if g.Count == 0 { //get page count if a == 51 { sa.Find("a").Each(func(d int, sd *goquery.Selection) { if d == 3 { g.Count, err = strconv.Atoi(sd.Text()) } }) } } }) return nil }
//run the reducers func (mr *MapReduceJob) runReducers() { reducersGroup := sync.WaitGroup{} logging.Info("Runnign reducers!") //Run N reducers in parallel for i := 0; i < mr.numReducers; i++ { reducersGroup.Add(1) go func() { for record := range mr.reducersInputChan { (*mr.mapReducer).Reduce(record.Key, record.Val.([]interface{}), mr.OutputChan) } reducersGroup.Done() }() } tracker := progressTracker(5, "items") //push the output from the aggregate dictionary to the reducers' input channel for k := range mr.aggregate { mr.reducersInputChan <- Record{k, mr.aggregate[k]} tracker <- 1 } close(tracker) //we close the input channel, causing all reduce loops to exit close(mr.reducersInputChan) //let's wait for them to actually finish reducersGroup.Wait() logging.Info("FINISHED Runnign reducers!") //we now close the output channel close(mr.OutputChan) }
//return the Daily url or "" if something went wrong
// The client uses a custom CheckRedirect (Redirect) so the redirect target
// can be read from the Location header instead of being followed.
func (t *Townclient) GetDailyUrl() (string, error) {
	log.Info("%s getting Daily Url for town", TAG)
	client := &http.Client{
		CheckRedirect: Redirect,
	}
	req, err := http.NewRequest("GET", DAILY, nil)
	if err != nil {
		log.Error("%s %s", TAG, err.Error())
		return "", err
	}
	// attach the standard town headers and the session cookies
	t.addHeader(req)
	if t.cookies != nil {
		for _, cookie := range t.cookies {
			req.AddCookie(cookie)
		}
	}
	resp, err := client.Do(req)
	// NOTE: err is deliberately not checked first — when CheckRedirect stops
	// the redirect, Do returns a non-nil error together with a usable
	// response; only a nil response is treated as fatal here.
	if resp == nil {
		return "", err
	}
	defer resp.Body.Close()
	time1 := time.Now()
	t.dumpResponse(resp, "daily"+strconv.Itoa(time1.Nanosecond()))
	// the daily url is delivered via the Location redirect header
	url, err := resp.Location()
	if err != nil {
		return "", err
	}
	log.Info("%s daily url: %v", TAG, url.String())
	return url.String(), nil
}
//Create a new mapreduce job. Pass a mapreducer object and the number of mappers and reducers in the pools func NewMapReduceJob(mr MapReducer, numReducers int, numMappers int) *MapReduceJob { job := &MapReduceJob{ mappersOutputChan: make(chan Record), OutputChan: make(chan Record), reducersInputChan: make(chan Record), inputChan: make(chan interface{}), mapReducer: &mr, aggregate: make(map[string][]interface{}), numReducers: numReducers, numMappers: numMappers, } logging.Info("Created job with MapReducer %s, %d mappers, %d reducers", mr, numMappers, numReducers) return job }
func (o *OAuthMiddleware) Handle(w http.ResponseWriter, r *vertex.Request, next vertex.HandlerFunc) (interface{}, error) { if strings.HasSuffix(r.URL.Path, loginPath) { return next(w, r) } user, err := o.getToken(r) if err != nil { o.redirect(w, r) return nil, vertex.Hijacked } logging.Info("Request authenticated. Continuing!") r.SetAttribute(AttrUser, user) return next(w, r) }
func NewHTMLRendererFiles(funcMap map[string]interface{}, fileNames ...string) *HTMLRenderer { if funcMap == nil { funcMap = template.FuncMap{} } tpl, err := template.New("html").Funcs(funcMap).ParseFiles(fileNames...) if err != nil { panic(err) } logging.Info("Created template from files %s (%#v)", fileNames, tpl) tpl.ExecuteTemplate(os.Stderr, "html", nil) return &HTMLRenderer{ template: tpl, } }
// invokeTest runs a tester and prints the output func (t *testRunner) invokeTest(path string, tc Tester) *testResult { if t.shouldRun(tc) { var result testResult if tc == nil || t.shouldRun(tc) { result = t.runTest(tc, path) logging.Info("Test result for %s: %#v", path, result) if err := t.formatter.format(result); err != nil { logging.Error("Error running formatter: %s", err) } return &result } } return nil }