func main() { log.SetFlags(0) ctxt := fs.NewContext(new(http.Request)) if err := dashboard.Update(ctxt, nil, "Go 1.2"); err != nil { log.Fatal(err) } log.Print("OK") }
// makeImage renders a size×size grid of cells as an RGBA image, with an
// optional text caption drawn beneath it.
//
// Each cell's color is supplied by f(x, y) as a packed 0xRRGGBBAA uint32.
// Cells are scale pixels wide and the grid is surrounded by a border of
// `border` cells. pt is the caption point size (defaults to 11 when 0);
// font is a file-system path to a TTF font (defaults to "data/luxisr.ttf").
// req is needed only to obtain a file-system context for reading the font.
// Panics on font-loading or drawing errors (callers recover elsewhere).
func makeImage(req *http.Request, caption, font string, pt, size, border, scale int, f func(x, y int) uint32) *image.RGBA {
	// Total pixel width/height of the grid including the border.
	d := (size + 2*border) * scale
	// Extra vertical space reserved for the caption, if any.
	csize := 0
	if caption != "" {
		if pt == 0 {
			pt = 11
		}
		csize = pt * 2
	}
	c := image.NewRGBA(image.Rect(0, 0, d, d+csize))

	// Fill the whole canvas with white before drawing cells.
	u := &image.Uniform{C: color.White}
	draw.Draw(c, c.Bounds(), u, image.ZP, draw.Src)

	// Paint each cell; the uniform source is mutated per cell to avoid
	// allocating a new image.Uniform each iteration.
	for y := 0; y < size; y++ {
		for x := 0; x < size; x++ {
			r := image.Rect((x+border)*scale, (y+border)*scale, (x+border+1)*scale, (y+border+1)*scale)
			rgba := f(x, y)
			// Unpack 0xRRGGBBAA into color channels.
			u.C = color.RGBA{byte(rgba >> 24), byte(rgba >> 16), byte(rgba >> 8), byte(rgba)}
			draw.Draw(c, r, u, image.ZP, draw.Src)
		}
	}

	if csize != 0 {
		if font == "" {
			font = "data/luxisr.ttf"
		}
		ctxt := fs.NewContext(req)
		dat, _, err := ctxt.Read(font)
		if err != nil {
			panic(err)
		}
		tfont, err := freetype.ParseFont(dat)
		if err != nil {
			panic(err)
		}
		ft := freetype.NewContext()
		ft.SetDst(c)
		ft.SetDPI(100)
		ft.SetFont(tfont)
		ft.SetFontSize(float64(pt))
		ft.SetSrc(image.NewUniform(color.Black))
		// First pass: draw with an empty clip so nothing is painted,
		// only to measure the rendered string width (wid).
		ft.SetClip(image.Rect(0, 0, 0, 0))
		wid, err := ft.DrawString(caption, freetype.Pt(0, 0))
		if err != nil {
			panic(err)
		}
		// Center the caption horizontally below the grid.
		p := freetype.Pt(d, d+3*pt/2)
		p.X -= wid.X
		p.X /= 2
		// Second pass: draw for real with the full clip.
		ft.SetClip(c.Bounds())
		ft.DrawString(caption, p)
	}

	return c
}
func AdminDashboard(w http.ResponseWriter, req *http.Request) { version := "Go " + strings.TrimPrefix(req.URL.Path, "/admin/dashboard/") ctxt := fs.NewContext(req) ctxt.Mkdir("issue-dashboard") c := appengine.NewContext(req) if err := dashboard.Update(ctxt, urlfetch.Client(c), version); err != nil { fmt.Fprintf(w, "Error updating: %s\n", err) } else { fmt.Fprintf(w, "Updated.\n") } }
// Show is the handler for showing a stored QR code. func Show(w http.ResponseWriter, req *http.Request) { ctxt := fs.NewContext(req) tag := req.URL.Path[len("/qr/show/"):] png := strings.HasSuffix(tag, ".png") if png { tag = tag[:len(tag)-len(".png")] } if !isTagName(tag) { fmt.Fprintf(w, "Sorry, QR code not found\n") return } if req.FormValue("flag") == "1" { flag(w, req, tag, ctxt) return } data, _, err := ctxt.Read("qrsave/" + tag) if err != nil { fmt.Fprintf(w, "Sorry, QR code not found.\n") return } var m Image if err := json.Unmarshal(data, &m); err != nil { panic(err) } m.Tag = tag switch req.FormValue("size") { case "big": m.Scale *= 2 case "small": m.Scale /= 2 } if png { if err := m.Encode(req); err != nil { panic(err) return } w.Header().Set("Cache-Control", "public, max-age=3600") w.Write(m.Code.PNG()) return } w.Header().Set("Cache-Control", "public, max-age=300") runTemplate(ctxt, w, "qr/permalink.html", &m) }
// toc traverses the file system to build the list of posts.
// It serves the table-of-contents page from cache when possible and
// falls back to gentoc to rebuild it on a miss. draft selects the draft
// TOC, isOwner/user gate which posts are visible.
func toc(w http.ResponseWriter, req *http.Request, draft bool, isOwner bool, user string) {
	c := fs.NewContext(req)
	c.Criticalf("toc() draft=%v isOwner=%v user=%s", draft, isOwner, user)
	// ☻ Compute cache key for this page
	var data []byte
	keystr := fmt.Sprintf("blog:toc:%v", draft) // Key schema: "blog:toc:{true|false}" draft|non-draft
	if req.FormValue("readdir") != "" {
		keystr += ",readdir=" + req.FormValue("readdir") // If "readdir:" form value is given, add to cache key
	}
	if draft {
		// NOTE(review): the fragment on the next line is redacted/corrupted in
		// this copy of the source ("******"). Judging from the surviving tokens
		// it appended the user to the draft cache key and performed the
		// CacheLoad(keystr, "blog", &data) that yields `key` and `ok` — recover
		// the exact original from version control before building.
		keystr += ",user="******"blog", &data); ok {
		w.Write(data) // cache hit: serve the stored page bytes directly
	} else {
		gentoc(w, req, key, draft, isOwner, user) // cache miss: rebuild and store
	}
}
func Dashboard(w http.ResponseWriter, req *http.Request) { httpCache(w, 5*time.Minute) ctxt := fs.NewContext(req) ctxt.ServeFile(w, req, "issue-dashboard/"+strings.TrimPrefix(req.URL.Path, "/dashboard/")) }
func upload(w http.ResponseWriter, req *http.Request, link string) { // Upload of a new image. // Copied from Moustachio demo. f, _, err := req.FormFile("image") if err != nil { fmt.Fprintf(w, "You need to select an image to upload.\n") return } defer f.Close() i, _, err := image.Decode(f) if err != nil { panic(err) } // Convert image to 128x128 gray+alpha. b := i.Bounds() const max = 128 // If it's gigantic, it's more efficient to downsample first // and then resize; resizing will smooth out the roughness. var i1 *image.RGBA if b.Dx() > 4*max || b.Dy() > 4*max { w, h := 2*max, 2*max if b.Dx() > b.Dy() { h = b.Dy() * h / b.Dx() } else { w = b.Dx() * w / b.Dy() } i1 = resize.Resample(i, b, w, h) } else { // "Resample" to same size, just to convert to RGBA. i1 = resize.Resample(i, b, b.Dx(), b.Dy()) } b = i1.Bounds() // Encode to PNG. dx, dy := 128, 128 if b.Dx() > b.Dy() { dy = b.Dy() * dx / b.Dx() } else { dx = b.Dx() * dy / b.Dy() } i128 := resize.ResizeRGBA(i1, i1.Bounds(), dx, dy) var buf bytes.Buffer if err := png.Encode(&buf, i128); err != nil { panic(err) } h := md5.New() h.Write(buf.Bytes()) tag := fmt.Sprintf("%x", h.Sum(nil))[:32] ctxt := fs.NewContext(req) if err := ctxt.Write("qr/upload/"+tag+".png", buf.Bytes()); err != nil { panic(err) } // Redirect with new image tag. // Redirect to draw with new image tag. http.Redirect(w, req, req.URL.Path+"?"+url.Values{"i": {tag}, "url": {link}}.Encode(), 302) }
// Draw is the handler for drawing a QR code. func Draw(w http.ResponseWriter, req *http.Request) { ctxt := fs.NewContext(req) url := req.FormValue("url") if url == "" { url = "http://swtch.com/qr" } if req.FormValue("upload") == "1" { upload(w, req, url) return } t0 := time.Now() img := req.FormValue("i") if !isImgName(img) { img = "pjw" } if req.FormValue("show") == "png" { i := loadSize(ctxt, img, 48) var buf bytes.Buffer png.Encode(&buf, i) w.Write(buf.Bytes()) return } if req.FormValue("flag") == "1" { flag(w, req, img, ctxt) return } if req.FormValue("x") == "" { var data = struct { Name string URL string }{ Name: img, URL: url, } runTemplate(ctxt, w, "qr/main.html", &data) return } arg := func(s string) int { x, _ := strconv.Atoi(req.FormValue(s)); return x } targ := makeTarg(ctxt, img, 17+4*arg("v")+arg("z")) m := &Image{ Name: img, Dx: arg("x"), Dy: arg("y"), URL: req.FormValue("u"), Version: arg("v"), Mask: arg("m"), RandControl: arg("r") > 0, Dither: arg("i") > 0, OnlyDataBits: arg("d") > 0, SaveControl: arg("c") > 0, Scale: arg("scale"), Target: targ, Seed: int64(arg("s")), Rotation: arg("o"), Size: arg("z"), } if m.Version > 8 { m.Version = 8 } if m.Scale == 0 { if arg("l") > 1 { m.Scale = 8 } else { m.Scale = 4 } } if m.Version >= 12 && m.Scale >= 4 { m.Scale /= 2 } if arg("l") == 1 { data, err := json.Marshal(m) if err != nil { panic(err) } h := md5.New() h.Write(data) tag := fmt.Sprintf("%x", h.Sum(nil))[:16] if err := ctxt.Write("qrsave/"+tag, data); err != nil { panic(err) } http.Redirect(w, req, "/qr/show/"+tag, http.StatusTemporaryRedirect) return } if err := m.Encode(req); err != nil { fmt.Fprintf(w, "%s\n", err) return } var dat []byte switch { case m.SaveControl: dat = m.Control default: dat = m.Code.PNG() } if arg("l") > 0 { w.Header().Set("Content-Type", "image/png") w.Write(dat) return } w.Header().Set("Content-Type", "text/html; charset=utf-8") fmt.Fprint(w, "<center><img src=\"data:image/png;base64,") io.WriteString(w, 
base64.StdEncoding.EncodeToString(dat)) fmt.Fprint(w, "\" /><br>") fmt.Fprintf(w, "<form method=\"POST\" action=\"%s&l=1\"><input type=\"submit\" value=\"Save this QR code\"></form>\n", m.Link()) fmt.Fprintf(w, "</center>\n") fmt.Fprintf(w, "<br><center><font size=-1>%v</font></center>\n", time.Now().Sub(t0)) }
// atomfeed serves the blog's Atom feed, rebuilding it on cache miss.
// The feed contains the ten most recent non-draft posts plus any older
// posts marked Favorite. Each entry's body is rendered through the
// blog/atom.html template. Panics on internal errors (recovered upstream).
func atomfeed(w http.ResponseWriter, req *http.Request) {
	c := fs.NewContext(req)
	c.Criticalf("Header: %v", req.Header)
	var data []byte
	if key, ok := c.CacheLoad("blog:atomfeed", "blog/post", &data); !ok {
		// Cache miss: list every post and load its metadata + article body.
		dir, err := c.ReadDir("blog/post")
		if err != nil {
			panic(err)
		}
		var all []*PostData
		for _, d := range dir {
			meta, article, err := loadPost(c, d.Name, req)
			if err != nil {
				// Should not happen: we just loaded the directory.
				panic(err)
			}
			if meta.IsDraft() {
				continue
			}
			meta.article = article
			all = append(all, meta)
		}
		sort.Sort(byTime(all))
		// Keep the 10 newest posts, then append older favorites.
		show := all
		if len(show) > 10 {
			show = show[:10]
			for _, meta := range all[10:] {
				if meta.Favorite {
					show = append(show, meta)
				}
			}
		}
		// NOTE(review): show[0] below panics if there are no posts at all —
		// presumably acceptable for this blog, but worth confirming.
		feed := &atom.Feed{
			Title:   config.FeedTitle,
			ID:      config.FeedID,
			Updated: atom.Time(show[0].Date.Time), // feed updated = newest post's date
			Author: &atom.Person{
				Name:  config.Name,
				URI:   "https://plus.google.com/" + config.PlusID,
				Email: config.Email,
			},
			Link: []atom.Link{
				{Rel: "self", Href: hostURL(req) + "/feed.atom"},
			},
		}
		for _, meta := range show {
			// A fresh template per entry: the atom.html wrapper plus this
			// post's article body registered as the "article" template.
			t := template.New("main")
			t.Funcs(funcMap)
			main, _, err := c.Read("blog/atom.html")
			if err != nil {
				panic(err)
			}
			_, err = t.Parse(string(main))
			if err != nil {
				panic(err)
			}
			template.Must(t.New("article").Parse(meta.article))
			var buf bytes.Buffer
			if err := t.Execute(&buf, meta); err != nil {
				panic(err)
			}
			e := &atom.Entry{
				Title: meta.Title,
				ID:    feed.ID + "/" + meta.Name,
				Link: []atom.Link{
					{Rel: "alternate", Href: meta.HostURL + "/" + meta.Name},
				},
				Published: atom.Time(meta.Date.Time),
				Updated:   atom.Time(meta.Date.Time),
				Summary: &atom.Text{
					Type: "text",
					Body: meta.Summary,
				},
				Content: &atom.Text{
					Type: "html",
					Body: buf.String(),
				},
			}
			feed.Entry = append(feed.Entry, e)
		}
		data, err = xml.Marshal(&feed)
		if err != nil {
			panic(err)
		}
		c.CacheStore(key, data)
	}
	// Feed readers like to hammer us; let Google cache the
	// response to reduce the traffic we have to serve.
	httpCache(w, 15*time.Minute)
	w.Header().Set("Content-Type", "application/atom+xml")
	w.Write(data)
}
// ☻ Rebuild the TOC page, used on cache misses in toc.
// gentoc lists blog/post (recursively), loads post metadata — reusing the
// on-disk "blogcache" for unchanged files and fetching the rest with at most
// 20 concurrent loads — filters by draft/ownership/read permission, renders
// the "toc" template, stores the page under key, and writes it to w.
func gentoc(w http.ResponseWriter, req *http.Request, key fs.CacheKey, draft, isOwner bool, user string) {
	var data []byte
	c := fs.NewContext(req)
	// ☻ Traverse "/blog/post/..." and its descendants
	dir, err := readDirEllipses(c, "blog/post")
	if err != nil {
		panic(err)
	}
	// ☻ If "readdir: 1" form field supplied, return number of files
	// (debug aid: skips the whole render).
	if req.FormValue("readdir") == "1" {
		fmt.Fprintf(w, "%d dir entries\n", len(dir))
		return
	}
	// ☻ Read postName->postData from file "/blogcache", if any available.
	// Corruption is logged and ignored; we just rebuild from scratch.
	postCache := map[string]*PostData{}
	if data, _, err := c.Read("blogcache"); err == nil {
		if err := json.Unmarshal(data, &postCache); err != nil {
			c.Criticalf("unmarshal blogcache: %v", err)
		}
	}
	// ☻ Create a channel whose buffer size equals the number of files in
	// "blog/post", so every producer send below is non-blocking.
	ch := make(chan *PostData, len(dir))
	// XXX: This is a limiting mechanism. Use limiter.
	// A counting semaphore of 20 tickets bounds concurrent loadPost calls.
	const par = 20
	var limit = make(chan bool, par)
	for i := 0; i < par; i++ { // Insert 20 tickets
		limit <- true
	}
	for _, d := range dir { // For each file in directory,
		if meta := postCache[d.Name]; meta != nil && // Attempt to fetch post meta from "blogcache" file cache; if present, and
			meta.FileModTime.Equal(d.ModTime) && // The cache copy is not older than the original, and
			meta.FileSize == d.Size { // They match in size
			ch <- meta // Use the cached post meta
			continue
		}
		<-limit // acquire a ticket (blocks when 20 loads are in flight)
		go func(d proto.FileInfo) { // Fetch post in parallel
			defer func() { limit <- true }() // release the ticket
			meta, _, err := loadPost(c, d.Name, req)
			if err != nil {
				// Should not happen: we just listed the directory.
				c.Criticalf("loadPost %s: %v", d.Name, err)
				return
			}
			ch <- meta
		}(d)
	}
	// Wait for all post loads to complete by reclaiming every ticket.
	for i := 0; i < par; i++ {
		<-limit
	}
	close(ch) // Write eof
	// ☻ Update postCache with the fresh data and apply permission/draft filters
	postCache = map[string]*PostData{}
	var all []*PostData
	for meta := range ch {
		postCache[meta.Name] = meta
		if (!draft && !meta.IsDraft() && !meta.NotInTOC) || (isOwner && draft) || meta.canRead(user) {
			all = append(all, meta)
		}
	}
	sort.Sort(byTime(all)) // ☻ Sort posts chronologically
	// ☻ Write new TOC cache to "/blogcache" (best-effort: failures only logged)
	if data, err := json.Marshal(postCache); err != nil {
		c.Criticalf("marshal blogcache: %v", err)
	} else if err := c.Write("blogcache", data); err != nil {
		c.Criticalf("write blogcache: %v", err)
	}
	// ☻ Render TOC page
	var buf bytes.Buffer
	t := mainTemplate(c)
	if err := t.Lookup("toc").Execute(&buf, &TocData{
		User:      c.User(),
		Draft:     draft,
		HostURL:   hostURL(req),
		DraftRoot: "/draft",
		PostRoot:  "/",
		Posts:     all,
	}); err != nil {
		panic(err)
	}
	data = buf.Bytes()
	c.CacheStore(key, data)
	w.Write(data)
}
// serve is the main request dispatcher for the blog: it normalizes the
// path, routes the feed / TOC / draft / static-file cases, and otherwise
// renders (and caches) a single post page.
func serve(w http.ResponseWriter, req *http.Request) {
	ctxt := fs.NewContext(req)
	ctxt.Criticalf("SERVING %s", req.URL.Path)
	// If a panic occurs in the user logic,
	// catch it, log it and return a 500 error.
	defer func() {
		if err := recover(); err != nil {
			var buf bytes.Buffer
			fmt.Fprintf(&buf, "panic: %s\n\n", err)
			buf.Write(debug.Stack())
			ctxt.Criticalf("%s", buf.String())
			http.Error(w, buf.String(), 500)
		}
	}()
	p := path.Clean("/" + req.URL.Path)
	// ☻ If the site is accessed via its appspot URL, redirect to the custom URL
	// to make sure links on the site are not broken.
	// if strings.Contains(req.Host, "appspot.com") {
	// 	http.Redirect(w, req, "http://research.swtch.com" + p, http.StatusFound)
	// }
	// ☻ Correct paths missing the root slash
	if p != req.URL.Path {
		http.Redirect(w, req, p, http.StatusFound)
		return
	}
	// ☻ Serve atom feed requests
	if p == "/feed.atom" {
		atomfeed(w, req)
		return
	}
	// ☻ Determine whether logged user is guest or owner
	user := ctxt.User()
	// isOwner = owner in AppEngine
	isOwner := aeu.IsAdmin(ae.NewContext(req)) || ctxt.User() == config.Account
	// ☻ If URL signifies the TOC page
	if p == "" || p == "/" || p == "/draft" {
		if p == "/draft" && user == "?" {
			// ☻ Prevent non-owners from viewing draft TOC pages
			ctxt.Criticalf("/draft loaded by %s", user)
			notfound(ctxt, w, req)
			return
		}
		toc(w, req, p == "/draft", isOwner, user) // Render
		return
	}
	// draft = we are in draft mode, and only if we have credentials
	draft := false
	if strings.HasPrefix(p, "/draft/") {
		if user == "?" {
			ctxt.Criticalf("/draft loaded by %s", user)
			notfound(ctxt, w, req)
			return
		}
		draft = true
		p = p[len("/draft"):]
	}
	/*
		// There are no valid URLs with slashes after the root or draft part of the URL.
		// We disable this, since we would like to be able to serve the whole MathJax tree statically.
		if strings.Contains(p[1:], "/") {
			notfound(ctxt, w, req)
			return
		}
	*/
	// If the path contains dots, it is interpreted as a static file
	if strings.Contains(p, ".") {
		// Let Google's front end servers cache static content for a short amount of time.
		// httpCache simply adds a caching directive in the HTTP response
		// Disable temporarily while fiddling with CSS files
		//httpCache(w, 5*time.Minute)
		ctxt.ServeFile(w, req, "blog/static/"+p)
		return
	}
	// Use just 'blog' as the cache path so that if we change
	// templates, all the cached HTML gets invalidated.
	var data []byte
	pp := "bloghtml:" + p
	if draft && !isOwner {
		// NOTE(review): the fragment on the next line is redacted/corrupted in
		// this copy of the source ("******"). From the surviving tokens it
		// appended the user to the per-draft cache key and performed the
		// CacheLoad(pp, "blog", &data) that yields `key` and `ok` — recover the
		// exact original from version control before building.
		pp += ",user="******"blog", &data); !ok {
		// Cache miss: load and render the post page.
		meta, article, err := loadPost(ctxt, p, req)
		if err != nil || meta.IsDraft() != draft || (draft && !isOwner && !meta.canRead(user)) {
			ctxt.Criticalf("no %s for %s", p, user)
			notfound(ctxt, w, req)
			return
		}
		t := mainTemplate(ctxt)
		template.Must(t.New("article").Parse(article))
		var buf bytes.Buffer
		meta.Comments = true
		if err := t.Execute(&buf, meta); err != nil {
			panic(err)
		}
		data = buf.Bytes()
		ctxt.CacheStore(key, data)
	}
	w.Write(data)
}
// toc builds and serves the table-of-contents page (second, inlined variant:
// the rebuild logic lives in the cache-miss branch instead of a gentoc call).
// draft selects the draft TOC; isOwner/user gate post visibility.
func toc(w http.ResponseWriter, req *http.Request, draft bool, isOwner bool, user string) {
	c := fs.NewContext(req)
	var data []byte
	// Cache key: "blog:toc:{true|false}" plus optional readdir/user suffixes.
	keystr := fmt.Sprintf("blog:toc:%v", draft)
	if req.FormValue("readdir") != "" {
		keystr += ",readdir=" + req.FormValue("readdir")
	}
	if draft {
		// NOTE(review): the fragment on the next line is redacted/corrupted in
		// this copy of the source ("******"). From the surviving tokens it
		// appended the user to the draft cache key and performed the
		// CacheLoad(keystr, "blog", &data) that yields `key` and the `!ok`
		// cache-miss test — recover the exact original from version control.
		keystr += ",user="******"blog", &data); !ok {
		// Cache miss: rebuild the TOC from the post directory.
		c := fs.NewContext(req)
		dir, err := c.ReadDir("blog/post")
		if err != nil {
			panic(err)
		}
		// Debug aid: report the entry count and skip rendering.
		if req.FormValue("readdir") == "1" {
			fmt.Fprintf(w, "%d dir entries\n", len(dir))
			return
		}
		// Load the on-disk post-metadata cache; corruption is logged and ignored.
		postCache := map[string]*PostData{}
		if data, _, err := c.Read("blogcache"); err == nil {
			if err := json.Unmarshal(data, &postCache); err != nil {
				c.Criticalf("unmarshal blogcache: %v", err)
			}
		}
		// Buffered to len(dir) so every send below is non-blocking.
		ch := make(chan *PostData, len(dir))
		// Counting semaphore: at most 20 concurrent loadPost calls.
		const par = 20
		var limit = make(chan bool, par)
		for i := 0; i < par; i++ {
			limit <- true
		}
		for _, d := range dir {
			// Reuse cached metadata when mod-time and size are unchanged.
			if meta := postCache[d.Name]; meta != nil &&
				meta.FileModTime.Equal(d.ModTime) &&
				meta.FileSize == d.Size {
				ch <- meta
				continue
			}
			<-limit // acquire a ticket
			go func(d proto.FileInfo) {
				defer func() { limit <- true }() // release the ticket
				meta, _, err := loadPost(c, d.Name, req)
				if err != nil {
					// Should not happen: we just listed the directory.
					c.Criticalf("loadPost %s: %v", d.Name, err)
					return
				}
				ch <- meta
			}(d)
		}
		// Reclaim every ticket to wait for all loads to finish.
		for i := 0; i < par; i++ {
			<-limit
		}
		close(ch)
		// Rebuild the metadata cache and apply draft/permission filters.
		postCache = map[string]*PostData{}
		var all []*PostData
		for meta := range ch {
			postCache[meta.Name] = meta
			if meta.IsDraft() == draft && (!draft || isOwner || meta.canRead(user)) {
				all = append(all, meta)
			}
		}
		sort.Sort(byTime(all))
		// Persist the refreshed metadata cache (best-effort; failures logged).
		if data, err := json.Marshal(postCache); err != nil {
			c.Criticalf("marshal blogcache: %v", err)
		} else if err := c.Write("blogcache", data); err != nil {
			c.Criticalf("write blogcache: %v", err)
		}
		// Render the TOC template and store the page bytes in the cache.
		var buf bytes.Buffer
		t := mainTemplate(c)
		if err := t.Lookup("toc").Execute(&buf, &TocData{draft, hostURL(req), all}); err != nil {
			panic(err)
		}
		data = buf.Bytes()
		c.CacheStore(key, data)
	}
	w.Write(data)
}
// serve is the main request dispatcher (second, non-AppEngine variant:
// ownership is decided by the configured owner account or a LISTEN_STDIN
// command-line flag instead of AppEngine admin status).
func serve(w http.ResponseWriter, req *http.Request) {
	ctxt := fs.NewContext(req)
	// Catch panics from handler logic, log them, and answer with a 500.
	defer func() {
		if err := recover(); err != nil {
			var buf bytes.Buffer
			fmt.Fprintf(&buf, "panic: %s\n\n", err)
			buf.Write(debug.Stack())
			ctxt.Criticalf("%s", buf.String())
			http.Error(w, buf.String(), 500)
		}
	}()
	p := path.Clean("/" + req.URL.Path)
	/*
		if strings.Contains(req.Host, "appspot.com") {
			http.Redirect(w, req, "http://research.swtch.com" + p, http.StatusFound)
		}
	*/
	// Redirect to the cleaned path when it differs from the request path.
	if p != req.URL.Path {
		http.Redirect(w, req, p, http.StatusFound)
		return
	}
	if p == "/feed.atom" {
		atomfeed(w, req)
		return
	}
	if strings.HasPrefix(p, "/20") && strings.Contains(p[1:], "/") {
		// Assume this is an old-style URL.
		// NOTE(review): there is no `return` after oldRedirect, so the handler
		// falls through and keeps processing after (presumably) writing a
		// redirect — confirm whether this is intentional.
		oldRedirect(ctxt, w, req, p)
	}
	user := ctxt.User()
	// Owner = the configured account, or the local stdin-listener dev mode.
	isOwner := ctxt.User() == owner || len(os.Args) >= 2 && os.Args[1] == "LISTEN_STDIN"
	// Root and /draft serve the table of contents.
	if p == "" || p == "/" || p == "/draft" {
		if p == "/draft" && user == "?" {
			// Anonymous users may not view the draft TOC.
			ctxt.Criticalf("/draft loaded by %s", user)
			notfound(ctxt, w, req)
			return
		}
		toc(w, req, p == "/draft", isOwner, user)
		return
	}
	// Draft mode requires credentials; strip the /draft prefix for lookup.
	draft := false
	if strings.HasPrefix(p, "/draft/") {
		if user == "?" {
			ctxt.Criticalf("/draft loaded by %s", user)
			notfound(ctxt, w, req)
			return
		}
		draft = true
		p = p[len("/draft"):]
	}
	// No valid URLs have slashes after the root/draft part.
	if strings.Contains(p[1:], "/") {
		notfound(ctxt, w, req)
		return
	}
	// Paths containing dots are static files.
	if strings.Contains(p, ".") {
		// Let Google's front end servers cache static
		// content for a short amount of time.
		httpCache(w, 5*time.Minute)
		ctxt.ServeFile(w, req, "blog/static/"+p)
		return
	}
	// Use just 'blog' as the cache path so that if we change
	// templates, all the cached HTML gets invalidated.
	var data []byte
	pp := "bloghtml:" + p
	if draft && !isOwner {
		// NOTE(review): the fragment on the next line is redacted/corrupted in
		// this copy of the source ("******"). From the surviving tokens it
		// appended the user to the cache key and performed the
		// CacheLoad(pp, "blog", &data) yielding `key` and the `!ok` cache-miss
		// test — recover the exact original from version control.
		pp += ",user="******"blog", &data); !ok {
		// Cache miss: load and render the post page.
		meta, article, err := loadPost(ctxt, p, req)
		if err != nil || meta.IsDraft() != draft || (draft && !isOwner && !meta.canRead(user)) {
			ctxt.Criticalf("no %s for %s", p, user)
			notfound(ctxt, w, req)
			return
		}
		t := mainTemplate(ctxt)
		template.Must(t.New("article").Parse(article))
		var buf bytes.Buffer
		meta.Comments = true
		if err := t.Execute(&buf, meta); err != nil {
			panic(err)
		}
		data = buf.Bytes()
		ctxt.CacheStore(key, data)
	}
	w.Write(data)
}