func ScanOfertaBlob(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	const batch = 300
	page, _ := strconv.Atoi(r.FormValue("pg"))
	if page < 1 {
		page = 1
	}
	offset := batch * (page - 1)
	q := datastore.NewQuery("Oferta").Offset(offset).Order("-FechaHora").Limit(batch)
	n, _ := q.Count(c)
	for i := q.Run(c); ; {
		var e model.Oferta
		key, err := i.Next(&e)
		if err == datastore.Done {
			break
		}
		if err != nil {
			fmt.Fprintf(w, "iterator error: %v\n", err)
			break
		}
		// Build the URL to serve the offer from the CDN; if that is not
		// possible, leave it blank.
		// NOTE: imgprops is prepared here but never passed to image.ServingURL.
		var imgprops image.ServingURLOptions
		imgprops.Secure = true
		imgprops.Size = 400
		imgprops.Crop = false
		if e.BlobKey != "none" {
			reader := blobstore.NewReader(c, e.BlobKey)
			if _, err := ioutil.ReadAll(reader); err != nil {
				fmt.Fprintf(w, "Error in idoft: %s, idemp: %s, blobkey: %v, Fecha: %v\n",
					e.IdOft, e.IdEmp, string(e.BlobKey), e.FechaHora)
				e.BlobKey = "none"
				if _, err := datastore.Put(c, key, &e); err != nil {
					fmt.Fprintf(w, "datastore.Put failed: %v\n", err)
				}
			}
		}
	}
	fmt.Fprintf(w, "Batch: %d, count: %d, from %d to %d\n", page, n, offset, offset+n)
	return
}

func GetPostContent(context appengine.Context, p Post) (string, error) {
	data, err := ioutil.ReadAll(blobstore.NewReader(context, p.Content))
	if err != nil {
		context.Errorf("ioutil.ReadAll: %v", err)
		return "", err
	}
	if len(data) <= 0 {
		context.Errorf("len(data): %v", len(data))
		return "", errors.New("len(data) < 1")
	}
	var decoded interface{}
	err = json.Unmarshal(data, &decoded)
	if err != nil {
		context.Errorf("json.Unmarshal: %v", err)
		return "", err
	}
	q := decoded.(map[string]interface{})
	content, ok := q["data"].(string)
	if !ok {
		context.Errorf("post content has no 'data' field internally")
		return "", errors.New("post has no 'data' field")
	}
	return content, nil
}

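// Hedged illustration of the blob payload GetPostContent above expects: a JSON
// object whose "data" field holds the post body, matching the fields the Save
// handler later in this listing checks for. The literal values and the helper
// name examplePostContent are made up for illustration only.
func examplePostContent() []byte {
	return []byte(`{"data": "<p>Hello, world</p>", "title": "First post", "labels": "go, appengine"}`)
}
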
func (sto *appengineStorage) Fetch(br blob.Ref) (file io.ReadCloser, size uint32, err error) {
	loan := ctxPool.Get()
	ctx := loan
	defer func() {
		if loan != nil {
			loan.Return()
		}
	}()
	row, err := fetchEnt(ctx, br)
	if err == datastore.ErrNoSuchEntity {
		err = os.ErrNotExist
		return
	}
	if err != nil {
		return
	}
	if !row.inNamespace(sto.namespace) {
		err = os.ErrNotExist
		return
	}
	closeLoan := loan
	var c io.Closer = &onceCloser{fn: func() { closeLoan.Return() }}
	loan = nil // take it, so it's not defer-closed
	reader := blobstore.NewReader(ctx, appengine.BlobKey(string(row.BlobKey)))
	type readCloser struct {
		io.Reader
		io.Closer
	}
	return readCloser{reader, c}, uint32(row.Size), nil
}

func upload(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	blobs, _, err := blobstore.ParseUpload(r)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	file := blobs["file"]
	if len(file) == 0 {
		c.Errorf("No file uploaded")
		http.Redirect(w, r, "/", http.StatusFound)
		return
	}
	reader := blobstore.NewReader(c, file[0].BlobKey)
	var pkg *Package
	switch file[0].ContentType {
	case "application/x-tar":
		pkg, err = parsePackageVarsFromTar(bufio.NewReader(reader))
		if err == nil {
			pkg.Type = TAR
		}
	case "application/octet-stream":
		pkg, err = parsePackageVarsFromFile(bufio.NewReader(reader))
		if err == nil {
			pkg.Type = SINGLE
		}
	default:
		// err is nil here, so report the unsupported content type instead of
		// dereferencing a nil error.
		http.Error(w, "unsupported content type: "+file[0].ContentType, http.StatusBadRequest)
		return
	}
	if err != nil {
		c.Errorf(fmt.Sprintf("Error reading from upload: %v", err))
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	key := packageKey(c, pkg.Name)
	_, err = datastore.Put(c, key, pkg)
	if err != nil {
		c.Errorf(fmt.Sprintf("Failed to save package %v", pkg.Name))
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	contents := Contents{
		BlobKey:    file[0].BlobKey,
		Version:    pkg.LatestVersion,
		UploadTime: time.Now().UTC(),
	}
	_, err = datastore.Put(c, versionKey(c, pkg.LatestVersion, key), &contents)
	if err != nil {
		c.Errorf(fmt.Sprintf("Failed to save contents for version %v, package %v",
			pkg.LatestVersion, pkg.Name))
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/upload_complete.html?package="+url.QueryEscape(pkg.Name), http.StatusFound)
}

func handleHome(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	if r.Method != "GET" || r.URL.Path != "/" {
		serve404(w)
		return
	}
	c := appengine.NewContext(r)
	q := datastore.NewQuery("Page").Order("-Date").Limit(4)
	n, _ := q.Count(c)
	//http.Error(w, strconv.Itoa(n), 404)
	pages := make([]Page, 0, 4)
	if n > 0 {
		_, err := q.GetAll(c, &pages)
		if nil != err {
			serveError(c, w, err)
			return
		}
	}
	if 0 == n {
		pages = append(pages, Page{})
	}
	for i := n; i < 4; i++ {
		pages = append(pages, pages[0])
	}
	headPage := pages[0]
	otherPages := pages[1:4]
	reader := blobstore.NewReader(c, headPage.HTML)
	buf := new(bytes.Buffer)
	buf.ReadFrom(reader)
	err := homeTemplate.ExecuteTemplate(w, "Head", headPage)
	if err != nil {
		serveError(c, w, err)
		return
	}
	err = homeTemplate.ExecuteTemplate(w, "Blob", buf)
	if err != nil {
		serveError(c, w, err)
		return
	}
	err = homeTemplate.ExecuteTemplate(w, "Title", headPage.Title)
	if err != nil {
		serveError(c, w, err)
		return
	}
	err = homeTemplate.ExecuteTemplate(w, "Other", otherPages)
	if err != nil {
		serveError(c, w, err)
		return
	}
	return
}

func blob(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	w.Header().Set("Content-Type", "image/png")
	f := blobstore.NewReader(c, appengine.BlobKey("AMIfv97HhOdzO1aYQEe0QBrzbWSjSgWr2-JUxFJh_KnwxAhEdAqqK76TeE7vm5eDJW0ZoMwFVwur0Ub3t1kD_KzP3yJi4LIG6A-dCdJrJYafoJgH7SITCBum4MF9CY-C7na5fBulmKwQXd2mEYMyfk_RDgeQN1SZug"))
	// A single Read returns at most one buffer's worth of data (1 MiB here),
	// so only the first chunk of a larger blob is written to the response.
	buf := make([]byte, 1024*1024)
	n, err := f.Read(buf)
	if err != nil {
		panic(err)
	}
	w.Write(buf[:n])
}

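// Hedged variant of the handler above: instead of a single 1 MiB Read, this
// sketch streams the whole blob with io.Copy. The function name and the
// placeholder blob key are assumptions, not from the source (assumed imports:
// "io", "net/http", "appengine", "appengine/blobstore").
func blobStream(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	w.Header().Set("Content-Type", "image/png")
	// Placeholder key for illustration; a real handler would look this up.
	f := blobstore.NewReader(c, appengine.BlobKey("BLOB_KEY_PLACEHOLDER"))
	// io.Copy reads until EOF, so blobs larger than one buffer are served in full.
	if _, err := io.Copy(w, f); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
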
// GetFileBlob retrieves a specific FileBlob from the datastore and opens an
// io.Reader for that blob.
func GetFileBlob(c appengine.Context, t, n string) (*FileBlob, error) {
	fb := &FileBlob{
		Type: t,
		Name: n,
	}
	err := datastore.Get(c, fb.key(c), fb)
	if err != nil {
		return fb, err
	}
	fb.Data = blobstore.NewReader(c, fb.BlobKey)
	return fb, nil
}

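// Hedged usage sketch for GetFileBlob above: read the opened blob fully into
// memory. The helper name readFileBlob and the ioutil import are assumptions;
// only GetFileBlob's signature and its Data field come from the source.
func readFileBlob(c appengine.Context, t, n string) ([]byte, error) {
	fb, err := GetFileBlob(c, t, n)
	if err != nil {
		return nil, err
	}
	// fb.Data is the reader opened by blobstore.NewReader, so it can be
	// consumed like any io.Reader.
	return ioutil.ReadAll(fb.Data)
}
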
func handleFile(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	c := appengine.NewContext(r)
	reader := blobstore.NewReader(c, appengine.BlobKey(r.FormValue("blobKey")))
	buf := new(bytes.Buffer)
	buf.ReadFrom(reader)
	err := pageTemplate.Execute(w, buf)
	if err != nil {
		serveError(c, w, err)
	}
	//blobstore.Send(w, appengine.BlobKey(r.FormValue("blobKey")))
}

func RestoreTask(w http.ResponseWriter, r *http.Request) {
	blobKey := appengine.BlobKey(r.FormValue("blobKey"))
	c := appengine.NewContext(r)
	blobInfo, err := blobstore.Stat(c, blobKey)
	if err != nil {
		c.Errorf("%v", err)
		return
	}
	c.Infof("Restoring from %s", blobInfo.Filename)
	reader := blobstore.NewReader(c, blobKey)
	LoadDB(c, reader)
}

func loadZipMap(c appengine.Context) error {
	b, err := findFileBlob(c, "zipcode.csv")
	if err != nil {
		return err
	}
	reader := blobstore.NewReader(c, b)
	zipMap, err = zipcode.GetMap(reader)
	if err != nil {
		return err
	}
	return nil
}

func importOPMLTask(pfc *PFContext) (TaskMessage, error) {
	c := pfc.C
	var blobKey appengine.BlobKey
	if blobKeyString := pfc.R.PostFormValue("opmlBlobKey"); blobKeyString == "" {
		return TaskMessage{}, errors.New("Missing blob key")
	} else {
		blobKey = appengine.BlobKey(blobKeyString)
	}
	reader := blobstore.NewReader(c, blobKey)
	opml, err := rss.ParseOPML(reader)
	if err != nil {
		// Remove the blob
		if err := blobstore.Delete(c, blobKey); err != nil {
			c.Warningf("Error deleting blob (key %s): %s", blobKey, err)
		}
		return TaskMessage{}, err
	}
	// Remove the blob
	if err := blobstore.Delete(c, blobKey); err != nil {
		c.Warningf("Error deleting blob (key %s): %s", blobKey, err)
	}
	importStarted := time.Now()
	parentRef := storage.FolderRef{
		UserID: pfc.UserID,
	}
	doneChannel := make(chan *rss.Outline)
	importing := importSubscriptions(pfc, doneChannel, pfc.UserID, parentRef, opml.Outlines())
	for i := 0; i < importing; i++ {
		subscription := <-doneChannel
		c.Infof("Completed %s", subscription.Title)
	}
	c.Infof("All completed in %s", time.Since(importStarted))
	return TaskMessage{
		Message: _l("Subscriptions imported successfully"),
		Refresh: true,
	}, nil
}

func GedcomHandler(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	key := appengine.BlobKey(r.FormValue("key"))
	buffer := new(bytes.Buffer)
	buffer.ReadFrom(blobstore.NewReader(c, key))
	var raw_data RawGedCom
	if !raw_data.Parse(buffer) {
		w.WriteHeader(http.StatusBadRequest)
		fmt.Fprintf(w, "Bad data")
		return
	}
	w.WriteHeader(http.StatusOK)
	fmt.Fprintf(w, "ok")
}

func GetFromBlobstore(c appengine.Context, blobkey appengine.BlobKey, dst interface{}) (interface{}, error) {
	//TODO: check capabilities
	reader := blobstore.NewReader(c, blobkey)
	data, err := ioutil.ReadAll(reader)
	if err != nil {
		c.Errorf("Datastore - GetFromBlobstore - error 2 - %s", err)
		return nil, err
	}
	dec := gob.NewDecoder(bytes.NewBuffer(data))
	err = dec.Decode(dst)
	if err != nil {
		c.Errorf("Datastore - GetFromBlobstore - error 3 - %s", err)
		return nil, err
	}
	return dst, nil
}

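// Hedged counterpart to GetFromBlobstore above: a sketch that gob-encodes a
// value into a new blob via blobstore.Create (used elsewhere in this listing)
// and returns its key. The function name and mime type are assumptions, not
// part of the original source.
func PutToBlobstore(c appengine.Context, src interface{}) (appengine.BlobKey, error) {
	w, err := blobstore.Create(c, "application/octet-stream")
	if err != nil {
		return "", err
	}
	if err := gob.NewEncoder(w).Encode(src); err != nil {
		return "", err
	}
	if err := w.Close(); err != nil {
		return "", err
	}
	// The blob key only becomes available after the writer has been closed.
	return w.Key()
}
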
func (sto *appengineStorage) FetchStreaming(br *blobref.BlobRef) (file io.ReadCloser, size int64, err error) {
	ctx := sto.ctx
	var loan ContextLoan
	if ctx == nil {
		loan = ctxPool.Get()
		ctx = loan
		defer func() {
			if loan != nil {
				loan.Return()
			}
		}()
	}
	row, err := fetchEnt(ctx, br)
	if err == datastore.ErrNoSuchEntity {
		err = os.ErrNotExist
		return
	}
	if err != nil {
		return
	}
	if !row.inNamespace(sto.namespace) {
		err = os.ErrNotExist
		return
	}
	size, err = row.size()
	if err != nil {
		return
	}
	var c io.Closer
	if loan != nil {
		closeLoan := loan
		c = &onceCloser{fn: func() { closeLoan.Return() }}
		loan = nil // take it, so it's not defer-closed
	} else {
		c = dummyCloser
	}
	reader := blobstore.NewReader(ctx, appengine.BlobKey(string(row.BlobKey)))
	type readCloser struct {
		io.Reader
		io.Closer
	}
	return readCloser{reader, c}, size, nil
}

func importOPML(pfc *PFContext) (interface{}, error) {
	c := pfc.C
	r := pfc.R
	blobs, other, err := blobstore.ParseUpload(r)
	if err != nil {
		return nil, NewReadableError(_l("Error receiving file"), &err)
	} else if len(other["client"]) > 0 {
		if clientID := other["client"][0]; clientID != "" {
			pfc.ChannelID = string(pfc.UserID) + "," + clientID
		}
	}
	var blobKey appengine.BlobKey
	if blobInfos := blobs["opml"]; len(blobInfos) == 0 {
		return nil, NewReadableError(_l("File not uploaded"), nil)
	} else {
		blobKey = blobInfos[0].BlobKey
		reader := blobstore.NewReader(c, blobKey)
		if _, err := rss.ParseOPML(reader); err != nil {
			if err := blobstore.Delete(c, blobKey); err != nil {
				c.Warningf("Error deleting blob (key %s): %s", blobKey, err)
			}
			return nil, NewReadableError(_l("Error reading OPML file"), &err)
		}
	}
	params := taskParams{
		"opmlBlobKey": string(blobKey),
	}
	if err := startTask(pfc, "import", params, importQueue); err != nil {
		// Remove the blob
		if err := blobstore.Delete(c, blobKey); err != nil {
			c.Warningf("Error deleting blob (key %s): %s", blobKey, err)
		}
		return nil, NewReadableError(_l("Cannot import - too busy"), &err)
	}
	return _l("Importing, please wait…"), nil
}

func updatePage(c appengine.Context, blobKey appengine.BlobKey) (Page, error) {
	reader := blobstore.NewReader(c, blobKey)
	buf := new(bytes.Buffer)
	buf.ReadFrom(reader)
	xmlDoc := new(DocumentXML)
	err := xml.Unmarshal(buf.Bytes(), xmlDoc)
	var newPage Page
	if nil != err {
		c.Errorf("%v", err)
		return newPage, err
	}
	newPage.GetDoc(xmlDoc)
	newPage.Date = time.Now()
	newPage.BlobKey = blobKey
	// If a page with the same title already exists, delete its old blob and
	// overwrite the entity.
	key := datastore.NewKey(c, "Page", newPage.Title, 0, nil)
	err = datastore.RunInTransaction(c, func(c appengine.Context) error {
		var savedPage Page
		err := datastore.Get(c, key, &savedPage)
		if nil != err && datastore.ErrNoSuchEntity != err {
			c.Errorf("%v", err)
			return err
		}
		if datastore.ErrNoSuchEntity != err {
			err = blobstore.Delete(c, savedPage.BlobKey)
			if nil != err {
				c.Errorf("%v", err)
				return err
			}
		}
		_, err = datastore.Put(c, key, &newPage)
		if nil != err {
			c.Errorf("%v", err)
			return err
		}
		return nil
	}, nil)
	if nil != err {
		return newPage, err
	}
	return newPage, err
}

// - Reads the blob for a blobstore key and re-encodes it as JPEG.
// - Returns a []byte of a JPEG or a non-nil error
func CompressBlob(c appengine.Context, blobKey appengine.BlobKey, params *Params) ([]byte, error) {
	// Check that the blob is of supported mime-type
	if !allowedMimeTypes[strings.ToLower(params.MimeType)] {
		return nil, errors.New("Unsupported mime-type:" + params.MimeType)
	}
	// Instantiate blobstore reader
	reader := blobstore.NewReader(c, blobKey)
	// Instantiate the image object
	img, _, err := image.Decode(reader)
	if err != nil {
		return nil, err
	}
	// Resize if necessary and maintain aspect ratio
	if params.Size > 0 && (img.Bounds().Max.X > params.Size || img.Bounds().Max.Y > params.Size) {
		size_x := img.Bounds().Max.X
		size_y := img.Bounds().Max.Y
		if size_x > params.Size {
			size_x_before := size_x
			size_x = params.Size
			size_y = int(math.Floor(float64(size_y) * float64(float64(size_x)/float64(size_x_before))))
		}
		if size_y > params.Size {
			size_y_before := size_y
			size_y = params.Size
			size_x = int(math.Floor(float64(size_x) * float64(float64(size_y)/float64(size_y_before))))
		}
		img = Resize(img, img.Bounds(), size_x, size_y)
	}
	// Write JPEG to buffer
	b := new(bytes.Buffer)
	o := &jpeg.Options{Quality: params.Quality}
	if err := jpeg.Encode(b, img, o); err != nil {
		return nil, err
	}
	// Return image content
	return b.Bytes(), nil
}

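// Hedged usage sketch for CompressBlob above: serve the compressed JPEG over
// HTTP. The handler name, the Params values and the "blobKey" form parameter
// are illustrative assumptions; only CompressBlob's signature and the Params
// fields it reads (MimeType, Size, Quality) come from the source.
func serveCompressedBlob(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	params := &Params{MimeType: "image/jpeg", Size: 1024, Quality: 85}
	jpegBytes, err := CompressBlob(c, appengine.BlobKey(r.FormValue("blobKey")), params)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "image/jpeg")
	w.Write(jpegBytes)
}
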
func handlePages(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	if r.Method != "GET" || r.URL.Path != "/pages" {
		serve404(w)
		return
	}
	title := r.FormValue("Title")
	if r.Method != "GET" || r.URL.Path != "/pages" || "" == title {
		serve404(w)
		return
	}
	c := appengine.NewContext(r)
	key := datastore.NewKey(c, "Page", title, 0, nil)
	page := new(Page)
	err := datastore.Get(c, key, page)
	if nil != err && datastore.ErrNoSuchEntity != err {
		serveError(c, w, err)
		return
	}
	if datastore.ErrNoSuchEntity == err {
		serve404(w)
		return
	}
	reader := blobstore.NewReader(c, page.BlobKey)
	buf := new(bytes.Buffer)
	buf.ReadFrom(reader)
	err = pageTemplate.ExecuteTemplate(w, "Blob", buf)
	if err != nil {
		serveError(c, w, err)
		return
	}
	err = pageTemplate.ExecuteTemplate(w, "Page", page)
	if err != nil {
		serveError(c, w, err)
		return
	}
	return
}

func handleServe(w http.ResponseWriter, r *http.Request) {
	// Instantiate blobstore reader
	reader := blobstore.NewReader(appengine.NewContext(r), appengine.BlobKey(r.FormValue("blobKey")))
	lat, lng, _ := getLatLng(reader)
	blobstore.Delete(appengine.NewContext(r), appengine.BlobKey(r.FormValue("blobKey")))
	if lat == "" {
		io.WriteString(w, "Sorry but your photo has no GeoTag information...")
		return
	}
	s := "http://maps.googleapis.com/maps/api/staticmap?zoom=5&size=600x300&maptype=roadmap&center="
	s = s + lat + "," + lng + "&markers=color:blue%7Clabel:I%7C" + lat + "," + lng
	img := "<img src='" + s + "' alt='map' />"
	fmt.Fprint(w, img)
}

func serveThumbnail(w http.ResponseWriter, r *http.Request) {
	parts := strings.Split(r.URL.Path, "/")
	if len(parts) == 3 {
		if key := parts[2]; key != "" {
			var data []byte
			c := appengine.NewContext(r)
			item, err := memcache.Get(c, key)
			if err == nil {
				data = item.Value
			} else {
				blobKey := appengine.BlobKey(key)
				if _, err = blobstore.Stat(c, blobKey); err == nil {
					fi := FileInfo{Key: blobKey}
					data, _ = fi.CreateThumbnail(
						blobstore.NewReader(c, blobKey),
						c,
					)
				}
			}
			if err == nil && len(data) > 3 {
				w.Header().Add(
					"Cache-Control",
					fmt.Sprintf("public,max-age=%d", EXPIRATION_TIME),
				)
				contentType := "image/png"
				if string(data[:3]) == "GIF" {
					contentType = "image/gif"
				} else if string(data[1:4]) != "PNG" {
					contentType = "image/jpeg"
				}
				w.Header().Set("Content-Type", contentType)
				// Write the raw image bytes; appending a newline would corrupt the body.
				w.Write(data)
				return
			}
		}
	}
	http.Error(w, "404 Not Found", http.StatusNotFound)
}

func (sto *appengineStorage) FetchStreaming(br *blobref.BlobRef) (file io.ReadCloser, size int64, err os.Error) {
	if sto.ctx == nil {
		err = errNoContext
		return
	}
	row, err := fetchEnt(sto.ctx, br)
	if err == datastore.ErrNoSuchEntity {
		err = os.ErrNotExist
		return
	}
	if err != nil {
		return
	}
	if !row.inNamespace(sto.namespace) {
		err = os.ErrNotExist
		return
	}
	size, err = row.size()
	if err != nil {
		return
	}
	reader := blobstore.NewReader(sto.ctx, appengine.BlobKey(string(row.BlobKey)))
	return ioutil.NopCloser(reader), size, nil
}

// rotate rotates the Painting's image counter-clockwise by angle in degrees.
// angle modulo 360 must be one of 0, 90, 180, 270.
func (p *Painting) rotate(c appengine.Context, angle int) error {
	switch math.Abs(float64(angle % 360)) {
	case 0, 90, 180, 270:
		break
	default:
		return fmt.Errorf("painting: Unsupported angle %d.", angle)
	}
	if p.Image == (Image{}) {
		return nil
	}
	// Read the image from the blobstore.
	r := blobstore.NewReader(c, p.Image.BlobKey)
	src, _, err := image.Decode(r)
	if err != nil {
		return err
	}
	// Create the rotated image.
	srcRect := src.Bounds()
	var dstRect image.Rectangle
	if angle == 0 || angle == 180 {
		dstRect = srcRect
	} else {
		dstRect = image.Rect(0, 0, srcRect.Dy(), srcRect.Dx())
	}
	dst := image.NewNRGBA(dstRect)
	err = graphics.Rotate(dst, src, &graphics.RotateOptions{
		Angle: float64(angle%360) * math.Pi / 180,
	})
	if err != nil {
		return err
	}
	// Create a new blob for the rotated image.
	w, err := blobstore.Create(c, "image/png")
	if err != nil {
		return err
	}
	err = png.Encode(w, dst)
	if err != nil {
		return err
	}
	err = w.Close()
	if err != nil {
		return err
	}
	// Delete the old blob.
	deleteBlobLater.Call(c, p.Image.BlobKey)
	// Update the image metadata.
	p.Image.BlobKey, err = w.Key()
	if err != nil {
		return err
	}
	p.Image.Width = dstRect.Dx()
	p.Image.Height = dstRect.Dy()
	u, err := aeimage.ServingURL(c, p.Image.BlobKey, nil)
	if err != nil {
		return err
	}
	p.Image.URL = u.String()
	err = p.Save(c)
	if err != nil {
		return err
	}
	return nil
}

func ImportOpmlTask(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	userid := r.FormValue("user")
	bk := r.FormValue("key")
	fr := blobstore.NewReader(c, appengine.BlobKey(bk))
	data, err := ioutil.ReadAll(fr)
	if err != nil {
		return
	}
	var skip int
	if s, err := strconv.Atoi(r.FormValue("skip")); err == nil {
		skip = s
	}
	c.Debugf("reader import for %v, skip %v", userid, skip)
	var userOpml []*OpmlOutline
	remaining := skip
	var proc func(label string, outlines []*OpmlOutline)
	proc = func(label string, outlines []*OpmlOutline) {
		for _, o := range outlines {
			if o.XmlUrl != "" {
				if remaining > 0 {
					remaining--
				} else if len(userOpml) < IMPORT_LIMIT {
					userOpml = append(userOpml, &OpmlOutline{
						Title:   label,
						Outline: []*OpmlOutline{o},
					})
				}
			}
			if o.Title != "" && len(o.Outline) > 0 {
				proc(o.Title, o.Outline)
			}
		}
	}
	opml := Opml{}
	if err := xml.Unmarshal(data, &opml); err != nil {
		c.Errorf("opml error: %v", err.Error())
		return
	}
	proc("", opml.Outline)
	// todo: refactor below with similar from ImportReaderTask
	wg := sync.WaitGroup{}
	wg.Add(len(userOpml))
	for i := range userOpml {
		go func(i int) {
			o := userOpml[i].Outline[0]
			if err := addFeed(c, userid, userOpml[i]); err != nil {
				c.Warningf("opml import error: %v", err.Error())
				// todo: do something here?
			}
			c.Debugf("opml import: %s, %s", o.Title, o.XmlUrl)
			wg.Done()
		}(i)
	}
	wg.Wait()
	ud := UserData{Id: "data", Parent: gn.Key(&User{Id: userid})}
	if err := gn.RunInTransaction(func(gn *goon.Goon) error {
		gn.Get(&ud)
		mergeUserOpml(&ud, opml.Outline...)
		_, err := gn.Put(&ud)
		return err
	}, nil); err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		c.Errorf("ude update error: %v", err.Error())
		return
	}
	if len(userOpml) == IMPORT_LIMIT {
		task := taskqueue.NewPOSTTask(routeUrl("import-opml-task"), url.Values{
			"key":  {bk},
			"user": {userid},
			"skip": {strconv.Itoa(skip + IMPORT_LIMIT)},
		})
		taskqueue.Add(c, task, "import-reader")
	}
}

func ImportReaderTask(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	gn := goon.FromContext(c)
	userid := r.FormValue("user")
	bk := r.FormValue("key")
	fr := blobstore.NewReader(c, appengine.BlobKey(bk))
	data, err := ioutil.ReadAll(fr)
	if err != nil {
		return
	}
	var skip int
	if s, err := strconv.Atoi(r.FormValue("skip")); err == nil {
		skip = s
	}
	v := struct {
		Subscriptions []struct {
			Id         string `json:"id"`
			Title      string `json:"title"`
			HtmlUrl    string `json:"htmlUrl"`
			Categories []struct {
				Id    string `json:"id"`
				Label string `json:"label"`
			} `json:"categories"`
		} `json:"subscriptions"`
	}{}
	json.Unmarshal(data, &v)
	c.Debugf("reader import for %v, skip %v, len %v", userid, skip, len(v.Subscriptions))
	end := skip + IMPORT_LIMIT
	if end > len(v.Subscriptions) {
		end = len(v.Subscriptions)
	}
	wg := sync.WaitGroup{}
	wg.Add(end - skip)
	userOpml := make([]*OpmlOutline, end-skip)
	for i := range v.Subscriptions[skip:end] {
		go func(i int) {
			sub := v.Subscriptions[skip+i]
			var label string
			if len(sub.Categories) > 0 {
				label = sub.Categories[0].Label
			}
			outline := &OpmlOutline{
				Title: label,
				Outline: []*OpmlOutline{
					&OpmlOutline{
						XmlUrl: sub.Id[5:],
						Title:  sub.Title,
					},
				},
			}
			userOpml[i] = outline
			if err := addFeed(c, userid, outline); err != nil {
				c.Warningf("reader import error: %v", err.Error())
				// todo: do something here?
			}
			c.Debugf("reader import: %s, %s", sub.Title, sub.Id)
			wg.Done()
		}(i)
	}
	wg.Wait()
	ud := UserData{Id: "data", Parent: gn.Key(&User{Id: userid})}
	if err := gn.RunInTransaction(func(gn *goon.Goon) error {
		gn.Get(&ud)
		mergeUserOpml(&ud, userOpml...)
		_, err := gn.Put(&ud)
		return err
	}, nil); err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		c.Errorf("ude update error: %v", err.Error())
		return
	}
	if end < len(v.Subscriptions) {
		task := taskqueue.NewPOSTTask(routeUrl("import-reader-task"), url.Values{
			"key":  {bk},
			"user": {userid},
			"skip": {strconv.Itoa(skip + IMPORT_LIMIT)},
		})
		taskqueue.Add(c, task, "import-reader")
	} else {
		blobstore.Delete(c, appengine.BlobKey(bk))
	}
}

func filterEdges(c appengine.Context, filename string, chrm string, start int, end int, filters []filter) ([]edge, os.Error) {
	out := make([]edge, 0, 2)
	indexname := fmt.Sprintf("%s.index.%s.json", filename, chrm)
	c.Logf("attempting to load %s from memcache", indexname)
	var indexjson []byte
	switch indexitem, err := memcache.Get(c, indexname); {
	case err == memcache.ErrCacheMiss:
		c.Logf("item not in the cache")
		q := datastore.NewQuery("fileNameToKey").Filter("Filename=", indexname)
		blobs := make([]fileNameToKey, 0, 100)
		if _, err := q.GetAll(c, &blobs); err != nil {
			c.Logf("%v", err)
		}
		if len(blobs) == 0 {
			return out, nil
		}
		c.Logf("blobs[0] is %v %v", blobs[0].Filename, blobs[0].BlobKey)
		blobreader := blobstore.NewReader(c, blobs[0].BlobKey)
		c.Logf("reading blob into indexjson")
		c.Logf("indexjson is %v long before", len(indexjson))
		var readerr os.Error
		if indexjson, readerr = ioutil.ReadAll(blobreader); readerr != nil && readerr != os.EOF {
			c.Logf("error loading json from blob: %v", readerr)
			return out, readerr
		}
		item := &memcache.Item{
			Key:   indexname,
			Value: indexjson,
		}
		// Add the item to the memcache, if the key does not already exist
		if err := memcache.Add(c, item); err == memcache.ErrNotStored {
			c.Logf("item with key %q already exists", item.Key)
		} else if err != nil {
			c.Logf("error adding item: %v", err)
		}
	case err != nil:
		c.Logf("error getting item: %v", err)
	case err == nil:
		c.Logf("indexjson loaded from memcache.")
		indexjson = indexitem.Value
	}
	c.Logf("indexjson is %v long after", len(indexjson))
	index := make([]edge, 0, 100)
	if err := json.Unmarshal(indexjson, &index); err != nil {
		c.Logf("error parsing json: %v", err)
		return out, err
	}
	if len(index) > 0 {
		leftbound := bisect_left(index, start, 0, -1)
		rightbound := bisect_right(index, end, 0, -1)
		for i := leftbound; i < rightbound; i++ {
			e := index[i]
			includeme := false
			if len(filters) > 0 {
				for _, filter := range filters {
					switch min, err := strconv.Atof32(filter.Minscore); {
					case err != nil:
						c.Logf("error parsing filter: %v", err)
					case e.Type == filter.Type && e.Score >= min:
						includeme = true
						break
					}
				}
			} else {
				includeme = true
			}
			if includeme {
				out = append(out, e)
			}
		}
	}
	return out, nil
}

func Save(blog_config map[string]interface{}) func(w http.ResponseWriter, req *http.Request) {
	l := func(w http.ResponseWriter, req *http.Request) {
		appcontext := appengine.NewContext(req)
		blobs, _, err := blobstore.ParseUpload(req)
		if err != nil {
			appcontext.Errorf("error parsing blobstore! %v", err)
			http.Error(w, "error parsing blobstore!", http.StatusBadRequest)
			return
		}
		//key := "coolguys"
		key := req.FormValue("g")
		if key == "" {
			http.Error(w, "did not specify a key!", http.StatusBadRequest)
			return
		}
		send_message := func(stat string, color string) {
			con := map[string]interface{}{"status": stat, "color": color}
			err := channel.SendJSON(appcontext, key, con)
			if err != nil {
				appcontext.Errorf("sending update message: %v", err)
				http.Error(w, err.Error(), http.StatusInternalServerError)
			}
		}
		bdata, ok := blobs["data"]
		if !ok {
			http.Error(w, "did not specify data as a param", http.StatusBadRequest)
			send_message("internal error while saving!", "#AA0000")
			return
		}
		if len(bdata) != 1 {
			appcontext.Errorf("error parsing blobstore! %v", err)
			http.Error(w, "error parsing blobstore!", http.StatusBadRequest)
			return
		}
		jsonkey := bdata[0].BlobKey
		data, err := ioutil.ReadAll(blobstore.NewReader(appcontext, jsonkey))
		if err != nil {
			appcontext.Errorf("error parsing blobstore! %v", err)
			http.Error(w, "error parsing blobstore!", http.StatusBadRequest)
			return
		}
		if len(data) <= 0 {
			http.Error(w, "did not specify data as a param", http.StatusBadRequest)
			send_message("internal error while saving!", "#AA0000")
			return
		}
		var decoded interface{}
		err = json.Unmarshal(data, &decoded)
		if err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			send_message("internal error while saving!", "#AA0000")
			return
		}
		q := decoded.(map[string]interface{})
		_, ok = q["data"].(string)
		if !ok {
			http.Error(w, "error: must supply JSON with 'data' specified!", http.StatusBadRequest)
			send_message("internal error while saving!", "#AA0000")
			return
		}
		title, ok := q["title"].(string)
		if !ok {
			http.Error(w, "error: must supply JSON with 'title' specified!", http.StatusBadRequest)
			send_message("internal error while saving!", "#AA0000")
			return
		}
		labels, ok := q["labels"].(string)
		if !ok {
			http.Error(w, "error: must supply JSON with 'labels' specified!", http.StatusBadRequest)
			send_message("internal error while saving!", "#AA0000")
			return
		}
		individual_labels := strings.Split(labels, ",")
		real_labels := make([]string, len(individual_labels))
		for i := range individual_labels {
			real_labels[i] = strings.ToLower(strings.Trim(individual_labels[i], " \t"))
		}
		_, err = post.SavePost(appcontext, title, jsonkey, real_labels, time.Now())
		if err != nil {
			appcontext.Errorf("saving a post: %v", err)
			http.Error(w, err.Error(), http.StatusInternalServerError)
			send_message("internal error while saving!", "#AA0000")
			return
		}
		send_message("saved", "#00AA00")
	}
	return l
}

func ImportOpml(c mpg.Context, w http.ResponseWriter, r *http.Request) {
	cu := user.Current(c)
	gn := goon.FromContext(c)
	u := User{Id: cu.ID}
	if err := gn.Get(&u); err != nil {
		serveError(w, err)
		return
	}
	backupOPML(c)
	blobs, _, err := blobstore.ParseUpload(r)
	if err != nil {
		serveError(w, err)
		return
	}
	fs := blobs["file"]
	if len(fs) == 0 {
		serveError(w, fmt.Errorf("no uploaded file found"))
		return
	}
	file := fs[0]
	fr := blobstore.NewReader(c, file.BlobKey)
	del := func() {
		blobstore.Delete(c, file.BlobKey)
	}
	fdata, err := ioutil.ReadAll(fr)
	if err != nil {
		del()
		serveError(w, err)
		return
	}
	buf := bytes.NewReader(fdata)
	// attempt to extract from google reader takeout zip
	if zb, zerr := zip.NewReader(buf, int64(len(fdata))); zerr == nil {
		for _, f := range zb.File {
			if strings.HasSuffix(f.FileHeader.Name, "Reader/subscriptions.xml") {
				if rc, rerr := f.Open(); rerr == nil {
					if fb, ferr := ioutil.ReadAll(rc); ferr == nil {
						fdata = fb
						break
					}
				}
			}
		}
	}
	// Preflight the OPML, so we can report any errors.
	d := xml.NewDecoder(bytes.NewReader(fdata))
	d.CharsetReader = charset.NewReader
	d.Strict = false
	opml := Opml{}
	if err := d.Decode(&opml); err != nil {
		del()
		serveError(w, err)
		c.Errorf("opml error: %v", err.Error())
		return
	}
	task := taskqueue.NewPOSTTask(routeUrl("import-opml-task"), url.Values{
		"key":  {string(file.BlobKey)},
		"user": {cu.ID},
	})
	taskqueue.Add(c, task, "import-reader")
}

func (fileStore *blobstoreFileStore) GetReader(key string) io.Reader {
	reader := blobstore.NewReader(fileStore.context, appengine.BlobKey(key))
	return reader
}

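// Hedged usage sketch for blobstoreFileStore.GetReader above: stream a stored
// blob to an HTTP response without buffering it in memory. The handler shape
// and the "key" form parameter are assumptions for illustration.
func serveStoredFile(fileStore *blobstoreFileStore, w http.ResponseWriter, r *http.Request) {
	// GetReader returns a plain io.Reader backed by blobstore.NewReader.
	if _, err := io.Copy(w, fileStore.GetReader(r.FormValue("key"))); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
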
/*
 * Handles individual blobs.
 *
 * - Only supported image types will be processed. Others will be returned as-is.
 * - Resizes the image if necessary.
 * - Writes the new compressed JPEG to blobstore.
 * - Deletes the old blob and substitutes the old BlobInfo with the new one.
 */
func handleBlob(options *compressionOptions, blobOriginal *blobstore.BlobInfo) (blob *blobstore.BlobInfo) {
	blob = blobOriginal
	// Check that the blob is of supported mime-type
	if !validateMimeType(blob) {
		return
	}
	// Instantiate blobstore reader
	reader := blobstore.NewReader(options.Context, blob.BlobKey)
	// Instantiate the image object
	img, _, err := image.Decode(reader)
	if err != nil {
		return
	}
	// Resize if necessary
	// Maintain aspect ratio!
	if options.Size > 0 && (img.Bounds().Max.X > options.Size || img.Bounds().Max.Y > options.Size) {
		size_x := img.Bounds().Max.X
		size_y := img.Bounds().Max.Y
		if size_x > options.Size {
			size_x_before := size_x
			size_x = options.Size
			size_y = int(math.Floor(float64(size_y) * float64(float64(size_x)/float64(size_x_before))))
		}
		if size_y > options.Size {
			size_y_before := size_y
			size_y = options.Size
			size_x = int(math.Floor(float64(size_x) * float64(float64(size_y)/float64(size_y_before))))
		}
		img = resize.Resize(img, img.Bounds(), size_x, size_y)
	}
	// JPEG options
	o := &jpeg.Options{
		Quality: options.Quality,
	}
	// Open writer
	writer, err := blobstore.Create(options.Context, "image/jpeg")
	if err != nil {
		return
	}
	// Write to blobstore
	if err := jpeg.Encode(writer, img, o); err != nil {
		_ = writer.Close()
		return
	}
	// Close writer
	if err := writer.Close(); err != nil {
		return
	}
	// Get key
	newKey, err := writer.Key()
	if err != nil {
		return
	}
	// Get new BlobInfo
	newBlobInfo, err := blobstore.Stat(options.Context, newKey)
	if err != nil {
		return
	}
	// All good!
	// Now replace the old blob and delete it
	deleteOldBlob(options, blob.BlobKey)
	blob = newBlobInfo
	return
}

func handleHunt(w http.ResponseWriter, r *http.Request) {
	var td HuntTemplateData
	var err os.Error
	defer recoverUserError(w, r)
	c = appengine.NewContext(r)
	td.User = requireAnyUser(w, r)
	LogAccess(r, td.User)
	if td.User == "" {
		panic("requireAnyUser did not return a username")
	}
	// get hunt name from URL path
	huntSearchName := strings.Split(strings.Replace(r.URL.Path, huntPath, "", 1), "/", 2)[0]
	// look up hunt in directory
	huntQuery := datastore.NewQuery(huntDirectoryDatastore).Filter("HuntName=", huntSearchName).Limit(1)
	huntentries := make([]HuntDirectoryEntry, 0, 1)
	if _, err := huntQuery.GetAll(c, &huntentries); err != nil {
		serveError(c, w, err)
		return
	}
	if len(huntentries) != 1 {
		panic("Hunt not found")
	}
	// hunt found, load hunt data from blobstore
	td.HuntDirectoryEntry = &huntentries[0] // sets all HuntDirectoryEntry fields without copying data
	c.Debugf("handleHunt: calling DecodeHuntData on blobkey %v", td.BlobKey)
	huntData, err := DecodeHuntData(blobstore.NewReader(c, td.BlobKey))
	if err != nil {
		serveError(c, w, err)
	}
	// now have huntData
	if appengine.IsDevAppServer() {
		td.DebugHuntData = huntData // exposes all huntData fields in td
	}
	// get and/or initialize current state
	var currentHuntState *HuntState
	currentHuntState, err = GetCurrentHuntState(td.User, td.HuntName)
	if err != nil {
		serveError(c, w, err)
	}
	if currentHuntState == nil {
		// set initial state (add transition from "START" to the hunt EnterState)
		if huntData.EnterState == "" {
			// hunt didn't specify initial state
			panic("Hunt did not specify EnterState, cannot initialize hunt")
		}
		err = StateTransition(td.User, td.HuntName, huntData.EnterState, "")
		if err != nil {
			panic("error setting initial state")
		}
		c.Debugf("handleHunt: StateTransition complete, getting current hunt state")
		currentHuntState, err = GetCurrentHuntState(td.User, td.HuntName)
		if err != nil {
			serveError(c, w, err)
		}
		if currentHuntState == nil {
			panic("currentHuntState nil after setting initial state")
		}
	}
	c.Debugf("handleHunt: Have currentHuntState %v", currentHuntState)
	currentStateName := currentHuntState.CurrentStateName
	td.CurrentState = huntData.States[currentStateName]
	// if currentHuntState.FromState == "" {
	// 	// suppress back button when previous state is not set
	// 	td.SuppressBackButton = true
	// }
	// get answer submission, if any
	err = r.ParseForm()
	if err != nil {
		serveError(c, w, err)
		return
	}
	answerAttempt := r.FormValue("Answer")
	var correct = false
	var cluesHaveAnswers = false
	var allCluesCorrect = true
	// check answer against all clues with answers, also noting whether any clues
	// in this state contain an answerable answer (otherwise we are in a
	// next/previous state)
	var clues []Clue = td.CurrentState.Clues
	for _, clue := range clues {
		// have a clue
		c.Debugf("handleHunt: have clue with answer: %v and answertype: %v", clue.Answer, clue.AnswerType)
		if clue.Answerable() {
			// this clue's answer requires a form to answer
			cluesHaveAnswers = true
		}
		correct = clue.AnswerCorrect(answerAttempt, td.User, td.HuntName)
		if !correct {
			allCluesCorrect = false
			// check if there is an IncorrectAnswerState and transition to it
			if clue.IncorrectAnswerState != "" {
				err = StateTransition(td.User, td.HuntName, clue.IncorrectAnswerState, td.CurrentState.StateName)
				if err != nil {
					panic(fmt.Sprintf("error advancing from State %v to IncorrectAnswerState %v: %v", td.CurrentState.StateName, clue.IncorrectAnswerState, err))
				}
				// redirect to self to get updated state
				// FIXME: this will miss answers just submitted now for later clues!
				http.Redirect(w, r, huntPath+"/"+td.HuntName, http.StatusFound)
				return
			}
		}
		if correct {
			// check if there is a CorrectAnswerState and transition to it immediately
			if clue.CorrectAnswerState != "" {
				err = StateTransition(td.User, td.HuntName, clue.CorrectAnswerState, td.CurrentState.StateName)
				if err != nil {
					panic(fmt.Sprintf("error advancing from State %v to CorrectAnswerState %v: %v", td.CurrentState.StateName, clue.CorrectAnswerState, err))
				}
				// redirect to self now that we have transitioned
				http.Redirect(w, r, huntPath+"/"+td.HuntName, http.StatusFound)
				return
			}
		}
	}
	if cluesHaveAnswers && allCluesCorrect {
		// could be based on points instead
		// advance to NextState and redirect to self
		err = StateTransition(td.User, td.HuntName, td.CurrentState.NextState, td.CurrentState.StateName)
		if err != nil {
			panic(fmt.Sprintf("error advancing from State %v to NextState %v: %v", td.CurrentState.StateName, td.CurrentState.NextState, err))
		}
		// redirect
		http.Redirect(w, r, huntPath+"/"+td.HuntName, http.StatusFound)
		return
	}
	if !cluesHaveAnswers {
		td.SuppressAnswerBox = true
		// if these clues don't have answers, we may have a "Forward" / "Back"
		// button submission; check for that and act on it
		if r.FormValue("Navigate") == "Forward" {
			// check for an IP address requirement
			criterion := td.CurrentState.AllowNetMask
			if criterion != "" {
				maskchunks := strings.Split(criterion, "/", -1)
				mask := net.ParseIP(maskchunks[0])
				ra := net.ParseIP(r.RemoteAddr)
				// these are the correct bytes from the IPv6 storage...I tried *desperately*
				// to get it to make a mask from an IP object but it seems impossible!
				maskedIP := ra.Mask(net.IPv4Mask(mask[12], mask[13], mask[14], mask[15]))
				if maskedIP.Equal(net.ParseIP(maskchunks[1])) {
					err = StateTransition(td.User, td.HuntName, td.CurrentState.NextState, td.CurrentState.StateName)
					if err != nil {
						panic(fmt.Sprintf("error advancing from State %v to NextState %v: %v", td.CurrentState.StateName, td.CurrentState.NextState, err))
					}
					// redirect
					http.Redirect(w, r, huntPath+"/"+td.HuntName, http.StatusFound)
				}
			} else {
				err = StateTransition(td.User, td.HuntName, td.CurrentState.NextState, td.CurrentState.StateName)
				if err != nil {
					panic(fmt.Sprintf("error advancing from State %v to NextState %v: %v", td.CurrentState.StateName, td.CurrentState.NextState, err))
				}
				// redirect
				http.Redirect(w, r, huntPath+"/"+td.HuntName, http.StatusFound)
			}
		}
		if r.FormValue("Navigate") == "Back" {
			// err = StateTransition(td.User, td.HuntName, currentHuntState.FromState, td.CurrentState.StateName)
			c.Debugf("Navigate: back with td.CurrentState.PreviousState = %v", td.CurrentState.PreviousState)
			if td.CurrentState.PreviousState == "" {
				// no previous state, just reload (but there should have been no button present!)
				c.Debugf("Navigate: back -- no previous state, reloading")
				http.Redirect(w, r, huntPath+"/"+td.HuntName, http.StatusFound)
				return
			}
			err = StateTransition(td.User, td.HuntName, td.CurrentState.PreviousState, td.CurrentState.StateName)
			if err != nil {
				panic(fmt.Sprintf("error retreating from State %v to NextState %v: %v", td.CurrentState.StateName, td.CurrentState.PreviousState, err))
			}
			// redirect
			http.Redirect(w, r, huntPath+"/"+td.HuntName, http.StatusFound)
		}
	}
	w.Header().Set("Content-Type", "text/html")
	c.Debugf("handleHunt: calling huntTemplate.Execute on td: %v", td)
	if err = huntTemplate.Execute(w, td); err != nil {
		serveError(c, w, err)
	}
	return
}