// Test the delete function.
func TestDelete(t *testing.T) {
	store := New()

	// Add some images.
	addA, _ := jpeg.Decode(base64.NewDecoder(base64.StdEncoding, strings.NewReader(imgA)))
	addB, _ := jpeg.Decode(base64.NewDecoder(base64.StdEncoding, strings.NewReader(imgB)))
	hashA, _ := CreateHash(addA)
	hashB, _ := CreateHash(addB)
	store.Add("imgA", hashA)
	store.Add("imgB", hashB)

	// Delete one.
	store.Delete("imgA")

	// Query should only return imgB.
	query, _ := jpeg.Decode(base64.NewDecoder(base64.StdEncoding, strings.NewReader(imgC)))
	queryHash, _ := CreateHash(query)
	matches := store.Query(queryHash)
	if len(matches) != 1 {
		t.Errorf("Invalid query result set size, expected 1, is %d", len(matches))
		return
	}
	if matches[0].ID != "imgB" {
		t.Errorf("Query found %s but should have found imgB", matches[0].ID)
	}
}
func postBuild(eng *engine.Engine, version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
	if version.LessThan("1.3") {
		return fmt.Errorf("Multipart upload for build is no longer supported. Please upgrade your docker client.")
	}
	var (
		authEncoded       = r.Header.Get("X-Registry-Auth")
		authConfig        = &auth.AuthConfig{}
		configFileEncoded = r.Header.Get("X-Registry-Config")
		configFile        = &auth.ConfigFile{}
		job               = eng.Job("build")
	)

	// This block can be removed when API versions prior to 1.9 are deprecated.
	// Both headers will be parsed and sent along to the daemon, but if a non-empty
	// ConfigFile is present, any value provided as an AuthConfig directly will
	// be overridden. See BuildFile::CmdFrom for details.
	if version.LessThan("1.9") && authEncoded != "" {
		authJson := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
		if err := json.NewDecoder(authJson).Decode(authConfig); err != nil {
			// for a pull it is not an error if no auth was given
			// to increase compatibility with the existing api it is defaulting to be empty
			authConfig = &auth.AuthConfig{}
		}
	}

	if configFileEncoded != "" {
		configFileJson := base64.NewDecoder(base64.URLEncoding, strings.NewReader(configFileEncoded))
		if err := json.NewDecoder(configFileJson).Decode(configFile); err != nil {
			// for a pull it is not an error if no auth was given
			// to increase compatibility with the existing api it is defaulting to be empty
			configFile = &auth.ConfigFile{}
		}
	}

	if version.GreaterThanOrEqualTo("1.8") {
		job.SetenvBool("json", true)
		streamJSON(job, w, true)
	} else {
		job.Stdout.Add(utils.NewWriteFlusher(w))
	}

	job.Stdin.Add(r.Body)
	job.Setenv("remote", r.FormValue("remote"))
	job.Setenv("t", r.FormValue("t"))
	job.Setenv("q", r.FormValue("q"))
	job.Setenv("nocache", r.FormValue("nocache"))
	job.Setenv("rm", r.FormValue("rm"))
	job.SetenvJson("authConfig", authConfig)
	job.SetenvJson("configFile", configFile)

	if err := job.Run(); err != nil {
		if !job.Stdout.Used() {
			return err
		}
		sf := utils.NewStreamFormatter(version.GreaterThanOrEqualTo("1.8"))
		w.Write(sf.FormatError(err))
	}
	return nil
}
func DecodeRFC2047Word(s string) (string, error) {
	fields := strings.Split(s, "?")
	if len(fields) != 5 || fields[0] != "=" || fields[4] != "=" {
		return "", errors.New("mail: address not RFC 2047 encoded")
	}
	charset, encMark := strings.ToLower(fields[1]), strings.ToLower(fields[2])
	enc := text.GetEncoding(charset)
	if enc == nil {
		return "", fmt.Errorf("mail: charset not supported: %q", charset)
	}

	in := bytes.NewBufferString(fields[3])
	var r io.Reader
	switch encMark {
	case "b":
		r = base64.NewDecoder(base64.StdEncoding, in)
	case "q":
		r = qDecoder{r: in}
	default:
		return "", fmt.Errorf("mail: RFC 2047 encoding not supported: %q", encMark)
	}

	dec, err := ioutil.ReadAll(text.NewReader(r, enc))
	if err != nil {
		return "", err
	}
	return string(dec), err
}
func decodeSecureCookie(value string) (user string, session string, err os.Error) {
	parts := strings.Split(value, "|", 3)
	if len(parts) != 3 {
		err = os.NewError("Malformed cookie value")
		return
	}
	val := parts[0]
	timestamp := parts[1]
	sig := parts[2]

	// Check signature
	if getCookieSig([]byte(val), timestamp) != sig {
		return "", "", os.NewError("Signature error, cookie is invalid")
	}

	// Check time stamp
	ts, _ := strconv.Atoi64(timestamp)
	if ts+maxAge < time.UTC().Seconds() {
		return "", "", os.NewError("Cookie is outdated")
	}

	buf := bytes.NewBufferString(val)
	encoder := base64.NewDecoder(base64.StdEncoding, buf)
	res, _ := ioutil.ReadAll(encoder)
	str := string(res)

	lst := strings.Split(str, "!", -1)
	if len(lst) != 2 {
		return "", "", os.NewError("Missing !")
	}
	return lst[0], lst[1], nil
}
func (ctx *Context) GetSecureCookie(name string) (string, bool) {
	cookie, ok := ctx.Request.Cookies[name]
	if !ok {
		return "", false
	}

	parts := strings.Split(cookie, "|", 3)
	val := parts[0]
	timestamp := parts[1]
	sig := parts[2]

	if getCookieSig([]byte(val), timestamp) != sig {
		return "", false
	}

	ts, _ := strconv.Atoi64(timestamp)
	if time.Seconds()-31*86400 > ts {
		return "", false
	}

	buf := bytes.NewBufferString(val)
	encoder := base64.NewDecoder(base64.StdEncoding, buf)
	res, _ := ioutil.ReadAll(encoder)
	return string(res), true
}
func (s *Server) getImagesSearch(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
	if err := parseForm(r); err != nil {
		return err
	}
	var (
		config      *cliconfig.AuthConfig
		authEncoded = r.Header.Get("X-Registry-Auth")
		headers     = map[string][]string{}
	)

	if authEncoded != "" {
		authJson := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
		if err := json.NewDecoder(authJson).Decode(&config); err != nil {
			// for a search it is not an error if no auth was given
			// to increase compatibility with the existing api it is defaulting to be empty
			config = &cliconfig.AuthConfig{}
		}
	}
	for k, v := range r.Header {
		if strings.HasPrefix(k, "X-Meta-") {
			headers[k] = v
		}
	}
	query, err := s.daemon.RegistryService.Search(r.Form.Get("term"), config, headers)
	if err != nil {
		return err
	}
	return json.NewEncoder(w).Encode(query.Results)
}
func getImagesSearch(eng *engine.Engine, version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
	if err := parseForm(r); err != nil {
		return err
	}
	var (
		authEncoded = r.Header.Get("X-Registry-Auth")
		authConfig  = &registry.AuthConfig{}
		metaHeaders = map[string][]string{}
	)

	if authEncoded != "" {
		authJson := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
		if err := json.NewDecoder(authJson).Decode(authConfig); err != nil {
			// for a search it is not an error if no auth was given
			// to increase compatibility with the existing api it is defaulting to be empty
			authConfig = &registry.AuthConfig{}
		}
	}
	for k, v := range r.Header {
		if strings.HasPrefix(k, "X-Meta-") {
			metaHeaders[k] = v
		}
	}

	var job = eng.Job("search", r.Form.Get("term"))
	job.SetenvJson("metaHeaders", metaHeaders)
	job.SetenvJson("authConfig", authConfig)
	streamJSON(job, w, false)

	return job.Run()
}
// Creates an image from Pull or from Import
func postImagesCreate(eng *engine.Engine, version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
	if err := parseForm(r); err != nil {
		return err
	}

	var (
		image = r.Form.Get("fromImage")
		repo  = r.Form.Get("repo")
		tag   = r.Form.Get("tag")
		job   *engine.Job
	)
	authEncoded := r.Header.Get("X-Registry-Auth")
	authConfig := &registry.AuthConfig{}
	if authEncoded != "" {
		authJson := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
		if err := json.NewDecoder(authJson).Decode(authConfig); err != nil {
			// for a pull it is not an error if no auth was given
			// to increase compatibility with the existing api it is defaulting to be empty
			authConfig = &registry.AuthConfig{}
		}
	}

	if image != "" { // pull
		if tag == "" {
			image, tag = parsers.ParseRepositoryTag(image)
		}
		metaHeaders := map[string][]string{}
		for k, v := range r.Header {
			if strings.HasPrefix(k, "X-Meta-") {
				metaHeaders[k] = v
			}
		}
		job = eng.Job("pull", image, tag)
		job.SetenvBool("parallel", version.GreaterThan("1.3"))
		job.SetenvJson("metaHeaders", metaHeaders)
		job.SetenvJson("authConfig", authConfig)
	} else { // import
		if tag == "" {
			repo, tag = parsers.ParseRepositoryTag(repo)
		}
		job = eng.Job("import", r.Form.Get("fromSrc"), repo, tag)
		job.Stdin.Add(r.Body)
		job.SetenvList("changes", r.Form["changes"])
	}

	if version.GreaterThan("1.0") {
		job.SetenvBool("json", true)
		streamJSON(job, w, true)
	} else {
		job.Stdout.Add(utils.NewWriteFlusher(w))
	}

	if err := job.Run(); err != nil {
		if !job.Stdout.Used() {
			return err
		}
		sf := utils.NewStreamFormatter(version.GreaterThan("1.0"))
		w.Write(sf.FormatError(err))
	}

	return nil
}
func TestUploadingBinary(t *testing.T) {
	if FB_TEST_VALID_ACCESS_TOKEN == "" {
		t.Skipf("skip this case as we don't have a valid access token.")
	}

	buf := bytes.NewBufferString(FB_TEST_BINARY_JPG_FILE)
	reader := base64.NewDecoder(base64.StdEncoding, buf)

	session := &Session{}
	session.SetAccessToken(FB_TEST_VALID_ACCESS_TOKEN)

	result, e := session.Api("/me/photos", POST, Params{
		"message": "Test photo from https://github.com/huandu/facebook",
		"source":  Data("my_profile.jpg", reader),
	})
	if e != nil {
		t.Fatalf("cannot create photo on my timeline. [e:%v]", e)
	}

	var id string
	e = result.DecodeField("id", &id)
	if e != nil {
		t.Fatalf("facebook should return photo id on success. [e:%v]", e)
	}
	t.Logf("newly created photo id is %v", id)
}
// LoadSharedFilesAscii loads an ASCII-encoded .sia file into the renter. It
// returns the nicknames of the loaded files.
func (r *Renter) LoadSharedFilesAscii(asciiSia string) ([]string, error) {
	lockID := r.mu.Lock()
	defer r.mu.Unlock(lockID)

	dec := base64.NewDecoder(base64.URLEncoding, bytes.NewBufferString(asciiSia))
	return r.loadSharedFiles(dec)
}
// CreateService creates a new service in a managed swarm cluster.
func (c *Cluster) CreateService(s types.ServiceSpec, encodedAuth string) (*apitypes.ServiceCreateResponse, error) {
	c.RLock()
	defer c.RUnlock()

	if !c.isActiveManager() {
		return nil, c.errNoManager()
	}

	ctx, cancel := c.getRequestContext()
	defer cancel()

	err := c.populateNetworkID(ctx, c.client, &s)
	if err != nil {
		return nil, err
	}

	serviceSpec, err := convert.ServiceSpecToGRPC(s)
	if err != nil {
		return nil, err
	}

	ctnr := serviceSpec.Task.GetContainer()
	if ctnr == nil {
		return nil, fmt.Errorf("service does not use container tasks")
	}

	if encodedAuth != "" {
		ctnr.PullOptions = &swarmapi.ContainerSpec_PullOptions{RegistryAuth: encodedAuth}
	}

	// retrieve auth config from encoded auth
	authConfig := &apitypes.AuthConfig{}
	if encodedAuth != "" {
		if err := json.NewDecoder(base64.NewDecoder(base64.URLEncoding, strings.NewReader(encodedAuth))).Decode(authConfig); err != nil {
			logrus.Warnf("invalid authconfig: %v", err)
		}
	}

	resp := &apitypes.ServiceCreateResponse{}

	// pin image by digest
	if os.Getenv("DOCKER_SERVICE_PREFER_OFFLINE_IMAGE") != "1" {
		digestImage, err := c.imageWithDigestString(ctx, ctnr.Image, authConfig)
		if err != nil {
			logrus.Warnf("unable to pin image %s to digest: %s", ctnr.Image, err.Error())
			resp.Warnings = append(resp.Warnings, fmt.Sprintf("unable to pin image %s to digest: %s", ctnr.Image, err.Error()))
		} else {
			logrus.Debugf("pinning image %s by digest: %s", ctnr.Image, digestImage)
			ctnr.Image = digestImage
		}
	}

	r, err := c.client.CreateService(ctx, &swarmapi.CreateServiceRequest{Spec: &serviceSpec})
	if err != nil {
		return nil, err
	}

	resp.ID = r.Service.ID
	return resp, nil
}
func main() {
	var r io.Reader
	r = strings.NewReader(data)
	r = base64.NewDecoder(base64.StdEncoding, r)
	r, _ = gzip.NewReader(r)
	io.Copy(os.Stdout, r)
}
func GetPlayback(base64Playback string) *js.Object {
	var p engine.Playback
	buf := bytes.NewReader([]byte(base64Playback))
	r := base64.NewDecoder(base64.StdEncoding, buf)
	gob.NewDecoder(r).Decode(&p)
	return js.MakeWrapper(&p)
}
func (b *Builder) statContainerPath(container, path string) (*containerPathStat, error) {
	query := make(url.Values, 1)
	query.Set("path", filepath.ToSlash(path)) // Normalize the paths used in the API.

	urlPath := fmt.Sprintf("/containers/%s/archive?%s", container, query.Encode())

	req, err := http.NewRequest("HEAD", b.client.URL.String()+urlPath, nil)
	if err != nil {
		return nil, fmt.Errorf("unable to prepare request: %s", err)
	}

	resp, err := b.client.HTTPClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("unable to make request: %s", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("request failed with status code: %d", resp.StatusCode)
	}

	encodedStat := resp.Header.Get("X-Docker-Container-Path-Stat")
	statDecoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(encodedStat))

	var stat containerPathStat
	if err = json.NewDecoder(statDecoder).Decode(&stat); err != nil {
		return nil, fmt.Errorf("unable to decode container path stat header: %s", err)
	}

	return &stat, nil
}
// Decompress the parameters stored within an operation
func (op *Operation) DecompressOperationParam() (err error) {
	defer func() {
		if e := recover(); e != nil {
			err = fmt.Errorf("DecompressOperationParam() -> %v", e)
		}
	}()

	if !op.IsCompressed {
		return nil
	}

	pstr, ok := op.Parameters.(string)
	if !ok {
		panic("Compressed parameter was not a string")
	}

	b := bytes.NewBuffer([]byte(pstr))
	rb64 := base64.NewDecoder(base64.StdEncoding, b)
	r, err := gzip.NewReader(rb64)
	if err != nil {
		panic(err)
	}
	rb, err := ioutil.ReadAll(r)
	if err != nil {
		panic(err)
	}

	err = json.Unmarshal(rb, &op.Parameters)
	if err != nil {
		panic(err)
	}

	op.IsCompressed = false
	return
}
// trim(url_base64(json(token))) + "." + hex(hmac-sha256(base64_str))
func (token *Token) Decode(tokenBytes []byte) error {
	const signatureLen = 64 // hmac-sha256

	bytesArray := bytes.Split(tokenBytes, tokenBytesSplitSep)
	if len(bytesArray) < 2 {
		return errors.New("invalid token bytes")
	}

	// Verify the signature.
	signatrue := make([]byte, signatureLen)
	Hash := hmac.New(sha256.New, securitykey.Key)
	Hash.Write(bytesArray[0])
	hex.Encode(signatrue, Hash.Sum(nil))
	if !bytes.Equal(signatrue, bytesArray[1]) {
		return errors.New("invalid token bytes, signature mismatch")
	}

	// Decode the payload.
	temp := signatrue[:4]                       // signatrue is no longer used; reuse its space.
	copy(temp, tokenBytes[len(bytesArray[0]):]) // Protect tokenBytes.
	defer func() {
		copy(tokenBytes[len(bytesArray[0]):], temp) // Restore tokenBytes.
		token.Signatrue = string(bytesArray[1])
	}()

	base64Bytes := base64Pad(bytesArray[0])
	base64Decoder := base64.NewDecoder(base64.URLEncoding, bytes.NewReader(base64Bytes))
	return json.NewDecoder(base64Decoder).Decode(token)
}
func TestCreateTiles(t *testing.T) {
	reader := base64.NewDecoder(base64.StdEncoding, strings.NewReader(data))
	m, _, err := image.Decode(reader)
	if err != nil {
		t.Errorf("Error loading data")
	}
	out, err := os.Create("createTiles-test.jpg")
	if err != nil {
		t.Errorf("Error creating test jpg")
	}
	err = jpeg.Encode(out, m, nil)
	if err != nil {
		t.Errorf("Error writing test jpg")
	}

	testPic := ImageFile{Name: "createTiles-test.jpg"}
	testPic.createTiles()

	if testPic.YEnd != 103 {
		t.Errorf("YEnd is %v, should be 103", testPic.YEnd)
	}
	if testPic.XEnd != 150 {
		t.Errorf("XEnd is %v, should be 150", testPic.XEnd)
	}
	if len(testPic.Tiles) != 165 {
		t.Errorf("Tile count is %v, should be 165", len(testPic.Tiles))
	}

	os.Remove("createTiles-test.jpg")
}
// getCookie gets a base64 and json encoded value from a cookie.
func getCookie(r *http.Request, name string, value interface{}) error {
	c, err := r.Cookie(name)
	if err != nil {
		return err
	}
	return json.NewDecoder(base64.NewDecoder(base64.URLEncoding, strings.NewReader(c.Value))).Decode(value)
}
func (k *SKBKeyringFile) Load() (err error) {
	G.Log.Debug("+ Loading SKB keyring: %s", k.filename)
	var packets KeybasePackets
	var file *os.File
	if file, err = os.OpenFile(k.filename, os.O_RDONLY, 0); err == nil {
		stream := base64.NewDecoder(base64.StdEncoding, file)
		packets, err = DecodePacketsUnchecked(stream)
		tmp := file.Close()
		if err == nil && tmp != nil {
			err = tmp
		}
	}

	if err != nil {
		if os.IsNotExist(err) {
			G.Log.Debug("| Keybase secret keyring doesn't exist: %s", k.filename)
		} else {
			G.Log.Warning("Error opening %s: %s", k.filename, err)
		}
	} else if err == nil {
		k.Blocks, err = packets.ToListOfSKBs()
	}

	G.Log.Debug("- Loaded SKB keyring: %s -> %s", k.filename, ErrToOk(err))
	return
}
func (pr *pluginRouter) pullPlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
	if err := httputils.ParseForm(r); err != nil {
		return err
	}

	metaHeaders := map[string][]string{}
	for k, v := range r.Header {
		if strings.HasPrefix(k, "X-Meta-") {
			metaHeaders[k] = v
		}
	}

	// Get X-Registry-Auth
	authEncoded := r.Header.Get("X-Registry-Auth")
	authConfig := &types.AuthConfig{}
	if authEncoded != "" {
		authJSON := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
		if err := json.NewDecoder(authJSON).Decode(authConfig); err != nil {
			authConfig = &types.AuthConfig{}
		}
	}

	privileges, err := pr.backend.Pull(r.FormValue("name"), metaHeaders, authConfig)
	if err != nil {
		return err
	}

	return httputils.WriteJSON(w, http.StatusOK, privileges)
}
func ExampleBuffer_reader() {
	// A Buffer can turn a string or a []byte into an io.Reader.
	buf := bytes.NewBufferString("R29waGVycyBydWxlIQ==")
	dec := base64.NewDecoder(base64.StdEncoding, buf)
	io.Copy(os.Stdout, dec)
	// Output: Gophers rule!
}
func (p *protoUnmarshaller) Unmarshal(buffer []byte) (*Entry, error) {
	decoder := base64.NewDecoder(base64.StdEncoding, bytes.NewBuffer(buffer))
	bBuffer := bytes.NewBuffer(nil)
	if _, err := bBuffer.ReadFrom(decoder); err != nil {
		return nil, fmt.Errorf("Failed to decode buffer: %s - %s", err.Error(), string(buffer))
	}
	b := bBuffer.Bytes()

	protoEntry := &ProtoEntry{}
	if err := proto.Unmarshal(b, protoEntry); err != nil {
		return nil, fmt.Errorf("Failed to unmarshal protobuf: %s - %s", err.Error(), string(b))
	}

	entry := &Entry{
		ID:           protoEntry.Id,
		Time:         time.Unix(protoEntry.TimeUnixNsec/int64(time.Second), protoEntry.TimeUnixNsec%int64(time.Second)).UTC(),
		Level:        protoEntry.Level,
		Contexts:     make([]Context, 0),
		WriterOutput: protoEntry.WriterOutput,
	}

	event, err := p.getEvent(protoEntry.EventTypeName, protoEntry.Event)
	if err != nil {
		return nil, err
	}
	entry.Event = event

	for contextTypeName, contextBytes := range protoEntry.ContextTypeNameToContext {
		context, err := p.getContext(contextTypeName, contextBytes)
		if err != nil {
			return nil, err
		}
		entry.Contexts = append(entry.Contexts, context)
	}

	return entry, nil
}
func testBodyPartWithStdLib(t *testing.T, originalPart *Message, stdlibPart *multipart.Part) {
	// decode base64 if exists
	var stdlibPartBodyReader io.Reader
	if stdlibPart.Header.Get("Content-Transfer-Encoding") == "base64" {
		stdlibPart.Header.Del("Content-Transfer-Encoding")
		stdlibPartBodyReader = base64.NewDecoder(base64.StdEncoding, stdlibPart)
	} else {
		stdlibPartBodyReader = stdlibPart
	}

	// confirm stdlib headers match our headers
	if !reflect.DeepEqual(map[string][]string(originalPart.Header), map[string][]string(stdlibPart.Header)) {
		t.Fatal("Message header does not match its parsed counterpart")
	}

	// read content
	content, err := ioutil.ReadAll(stdlibPartBodyReader)
	if err != nil || stdlibPart.Close() != nil {
		t.Fatal("Couldn't read or close part body", err)
	}

	// confirm content is deeply equal
	if !reflect.DeepEqual(originalPart.Body, content) {
		t.Fatal("Message body does not match its parsed counterpart")
	}
}
func (s *imageRouter) getImagesSearch(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
	if err := httputils.ParseForm(r); err != nil {
		return err
	}
	var (
		config      *types.AuthConfig
		authEncoded = r.Header.Get("X-Registry-Auth")
		headers     = map[string][]string{}
	)

	if authEncoded != "" {
		authJSON := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded))
		if err := json.NewDecoder(authJSON).Decode(&config); err != nil {
			// for a search it is not an error if no auth was given
			// to increase compatibility with the existing api it is defaulting to be empty
			config = &types.AuthConfig{}
		}
	}
	for k, v := range r.Header {
		if strings.HasPrefix(k, "X-Meta-") {
			headers[k] = v
		}
	}
	query, err := s.backend.SearchRegistryForImages(ctx, r.Form.Get("term"), config, headers)
	if err != nil {
		return err
	}
	return httputils.WriteJSON(w, http.StatusOK, query.Results)
}
func (d *Data) decodeBase64() (data []byte, err error) {
	rawData := bytes.TrimSpace(d.RawData)
	r := bytes.NewReader(rawData)

	encr := base64.NewDecoder(base64.StdEncoding, r)

	var comr io.Reader

	switch d.Compression {
	case "gzip":
		comr, err = gzip.NewReader(encr)
		if err != nil {
			return
		}
	case "zlib":
		comr, err = zlib.NewReader(encr)
		if err != nil {
			return
		}
	case "":
		comr = encr
	default:
		err = UnknownCompression
		return
	}

	return ioutil.ReadAll(comr)
}
func WithTestDSN(t *testing.T, tf func(string, <-chan *resultPacket)) {
	pch := make(chan *resultPacket, 1)
	s := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
		defer req.Body.Close()
		contentType := req.Header.Get("Content-Type")
		var bodyReader io.Reader = req.Body
		// underlying client will compress and encode payload above certain size
		if contentType == "application/octet-stream" {
			bodyReader = base64.NewDecoder(base64.StdEncoding, bodyReader)
			bodyReader, _ = zlib.NewReader(bodyReader)
		}
		d := json.NewDecoder(bodyReader)
		p := &resultPacket{}
		err := d.Decode(p)
		if err != nil {
			t.Fatal(err.Error())
		}
		pch <- p
	}))
	defer s.Close()

	fragments := strings.SplitN(s.URL, "://", 2)
	dsn := fmt.Sprintf(
		"%s://public:secret@%s/sentry/project-id",
		fragments[0],
		fragments[1],
	)
	tf(dsn, pch)
}
func (c *Coder) DecryptId(stringId string) int64 {
	// if (len(stringId) != 14) {
	//	fmt.Println("bug, stringId:", stringId)
	// }

	// base64 decode
	buf := bytes.NewBufferString(stringId + "==")
	decoderUrl := base64.NewDecoder(base64.URLEncoding, buf)
	decoded := make([]byte, 20)
	n, err := decoderUrl.Read(decoded)
	if err != nil {
		log.Fatalf("cannot decode url: %s", err)
	}
	//fmt.Println(decoded)
	decoded = decoded[:n]

	// decrypt
	decrypted := make([]byte, 16)
	c.b.Decrypt(decrypted, decoded[:n])
	decrypted = decrypted[:8]
	//fmt.Println(decoded[:n], decrypted)

	var id int64
	reader := bytes.NewReader(decrypted)
	binary.Read(reader, binary.LittleEndian, &id)
	//fmt.Println(id)
	return id
}
// assumes base64'd
func createAttachment(content_type, fname string, body io.Reader) NNTPAttachment {
	media_type, _, err := mime.ParseMediaType(content_type)
	if err == nil {
		a := new(nntpAttachment)
		dec := base64.NewDecoder(base64.StdEncoding, body)
		_, err = io.Copy(a, dec)
		if err == nil {
			a.header = make(textproto.MIMEHeader)
			a.mime = media_type + "; charset=UTF-8"
			idx := strings.LastIndex(fname, ".")
			a.ext = ".txt"
			if idx > 0 {
				a.ext = fname[idx:]
			}
			a.header.Set("Content-Disposition", `form-data; filename="`+fname+`"; name="attachment"`)
			a.header.Set("Content-Type", a.mime)
			a.header.Set("Content-Transfer-Encoding", "base64")
			h := a.Hash()
			hashstr := base32.StdEncoding.EncodeToString(h[:])
			a.hash = h[:]
			a.filepath = hashstr + a.ext
			a.filename = fname
			return a
		}
	}
	return nil
}
func (ctx *Context) GetSecureCookie(name string) (string, bool) {
	for _, cookie := range ctx.Request.Cookies() {
		if cookie.Name != name {
			continue
		}

		parts := strings.SplitN(cookie.Value, "|", 3)

		val := parts[0]
		timestamp := parts[1]
		sig := parts[2]

		if getCookieSig(ctx.Server.Config.CookieSecret, []byte(val), timestamp) != sig {
			return "", false
		}

		ts, _ := strconv.ParseInt(timestamp, 0, 64)
		if time.Now().Unix()-31*86400 > ts {
			return "", false
		}

		buf := bytes.NewBufferString(val)
		encoder := base64.NewDecoder(base64.StdEncoding, buf)
		res, _ := ioutil.ReadAll(encoder)
		return string(res), true
	}
	return "", false
}
func (TestGoodsGetter) Get(url string) ([]byte, error) {
	if strings.Contains(url, "https://api.flickr.com/services/rest/?method=flickr.photos.search&api_key=488c1e7018f1ddf78b09d51a9604622a&media=photos&per_page=400&page=1&format=json") {
		return mockFlickrResp("1", "5", "2000"), nil
	}
	if strings.Contains(url, "https://api.flickr.com/services/rest/?method=flickr.photos.search&api_key=488c1e7018f1ddf78b09d51a9604622a&media=photos&per_page=400&page=2&format=json") {
		return mockFlickrResp("2", "5", "2000"), nil
	}
	if strings.Contains(url, "https://api.flickr.com/services/rest/?method=flickr.photos.search&api_key=488c1e7018f1ddf78b09d51a9604622a&media=photos&per_page=400&page=3&format=json") {
		return mockFlickrResp("3", "5", "2000"), nil
	}
	if strings.Contains(url, "https://api.flickr.com/services/rest/?method=flickr.photos.search&api_key=488c1e7018f1ddf78b09d51a9604622a&media=photos&per_page=400&page=4&format=json") {
		return mockFlickrResp("4", "5", "2000"), nil
	}
	if strings.Contains(url, "https://api.flickr.com/services/rest/?method=flickr.photos.search&api_key=488c1e7018f1ddf78b09d51a9604622a&media=photos&per_page=400&page=5&format=json") {
		return mockFlickrResp("5", "5", "2000"), nil
	}
	if strings.Contains(url, "https://farm") {
		reader := base64.NewDecoder(base64.StdEncoding, strings.NewReader(data))
		m, _, err := image.Decode(reader)
		if err != nil {
			fmt.Println("Error loading data")
		}
		buf := new(bytes.Buffer)
		err = png.Encode(buf, convertToNRGBA(m))
		if err != nil {
			fmt.Println("error encoding test image: ", err)
		}
		return []byte(buf.Bytes()), nil
	}
	return nil, errors.New("Don't recognize URL: " + url)
}