func writeFile(f []byte, path string) *model.AppError {
	if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 {
		var auth aws.Auth
		auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId
		auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey

		s := s3.New(auth, awsRegion())
		bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket)

		ext := filepath.Ext(path)

		var err error
		if model.IsFileExtImage(ext) {
			options := s3.Options{}
			err = bucket.Put(path, f, model.GetImageMimeType(ext), s3.Private, options)
		} else {
			options := s3.Options{}
			err = bucket.Put(path, f, "binary/octet-stream", s3.Private, options)
		}

		if err != nil {
			return model.NewAppError("writeFile", "Encountered an error writing to S3", err.Error())
		}
	} else if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_LOCAL {
		if err := writeFileLocally(f, utils.Cfg.FileSettings.Directory+path); err != nil {
			return err
		}
	} else {
		return model.NewAppError("writeFile", "File storage not configured properly. Please configure for either S3 or local server file storage.", "")
	}

	return nil
}
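writeFileLocally (and the exported WriteFileLocally in the later variant) is referenced but not included in these snippets. Based on the inline local-storage branch of the ServiceSettings-era writeFile further down (os.MkdirAll with mode 0774, then a 0644 write), a plausible minimal version might look like the following sketch; the body is an assumption, shown snippet-style because it borrows the same model and utils helpers as the originals.

// Sketch only: a plausible writeFileLocally, mirroring the inline local-storage
// branch shown in the older writeFile variant (MkdirAll 0774, WriteFile 0644).
func writeFileLocally(f []byte, path string) *model.AppError {
	if err := os.MkdirAll(filepath.Dir(path), 0774); err != nil {
		return model.NewAppError("writeFileLocally", "Encountered an error creating the directory for the new file", err.Error())
	}
	if err := ioutil.WriteFile(path, f, 0644); err != nil {
		return model.NewAppError("writeFileLocally", "Encountered an error writing to local server storage", err.Error())
	}
	return nil
}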
func sign(auth aws.Auth, method, path string, params map[string]string, host string) {
	params["AWSAccessKeyId"] = auth.AccessKey
	params["SignatureVersion"] = "2"
	params["SignatureMethod"] = "HmacSHA256"
	if auth.Token() != "" {
		params["SecurityToken"] = auth.Token()
	}

	// AWS specifies that the parameters in a signed request must
	// be provided in the natural order of the keys. This is distinct
	// from the natural order of the encoded value of key=value.
	// Percent and Equals affect the sorting order.
	var keys, sarray []string
	for k := range params {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		sarray = append(sarray, aws.Encode(k)+"="+aws.Encode(params[k]))
	}

	// An empty path must be signed as "/".
	if len(path) == 0 {
		path = "/"
	}

	joined := strings.Join(sarray, "&")
	payload := method + "\n" + host + "\n" + path + "\n" + joined
	hash := hmac.New(sha256.New, []byte(auth.SecretKey))
	hash.Write([]byte(payload))
	signature := make([]byte, b64.EncodedLen(hash.Size()))
	b64.Encode(signature, hash.Sum(nil))

	params["Signature"] = string(signature)
}
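For reference, here is a self-contained sketch of the same Signature Version 2 construction using only the standard library, so the string-to-sign (method, host, path, and the sorted encoded query) is visible without the goamz helpers. The parameter values and key names in main are invented for illustration, and url.QueryEscape is only an approximation of aws.Encode.

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"net/url"
	"sort"
	"strings"
)

// signV2 joins method, host, path and the sorted, encoded query with newlines,
// then returns the base64-encoded HMAC-SHA256 of that string-to-sign.
// Note: aws.Encode percent-encodes per RFC 3986 (spaces as %20), while
// url.QueryEscape uses "+"; for keys/values without spaces they agree.
func signV2(secretKey, method, host, path string, params map[string]string) string {
	if path == "" {
		path = "/"
	}
	keys := make([]string, 0, len(params))
	for k := range params {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	pairs := make([]string, 0, len(keys))
	for _, k := range keys {
		pairs = append(pairs, url.QueryEscape(k)+"="+url.QueryEscape(params[k]))
	}
	payload := strings.Join([]string{method, host, path, strings.Join(pairs, "&")}, "\n")
	mac := hmac.New(sha256.New, []byte(secretKey))
	mac.Write([]byte(payload))
	return base64.StdEncoding.EncodeToString(mac.Sum(nil))
}

func main() {
	params := map[string]string{
		"Action":           "DescribeInstances",
		"AWSAccessKeyId":   "AKIDEXAMPLE",
		"SignatureVersion": "2",
		"SignatureMethod":  "HmacSHA256",
	}
	fmt.Println(signV2("secret", "GET", "ec2.amazonaws.com", "/", params))
}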
func moveFile(oldPath, newPath string) *model.AppError {
	if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 {
		fileData := make(chan []byte)
		getFileAndForget(oldPath, fileData)
		fileBytes := <-fileData

		if fileBytes == nil {
			return model.NewLocAppError("moveFile", "api.file.move_file.get_from_s3.app_error", nil, "")
		}

		var auth aws.Auth
		auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId
		auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey

		s := s3.New(auth, awsRegion())
		bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket)

		if err := bucket.Del(oldPath); err != nil {
			return model.NewLocAppError("moveFile", "api.file.move_file.delete_from_s3.app_error", nil, err.Error())
		}

		if err := writeFile(fileBytes, newPath); err != nil {
			return err
		}
	} else if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_LOCAL {
		if err := os.Rename(utils.Cfg.FileSettings.Directory+oldPath, utils.Cfg.FileSettings.Directory+newPath); err != nil {
			return model.NewLocAppError("moveFile", "api.file.move_file.rename.app_error", nil, err.Error())
		}
	} else {
		return model.NewLocAppError("moveFile", "api.file.move_file.configured.app_error", nil, "")
	}

	return nil
}
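getFileAndForget is not shown in these snippets. Judging only from its call site here (the caller blocks on the channel and treats nil as failure), it presumably fetches the file on a goroutine and sends the bytes, or nil on error. A hedged sketch of such a helper, again snippet-style and not the project's actual code:

// Sketch only: a plausible getFileAndForget, inferred from how moveFile uses it.
// It reads asynchronously and sends nil on the channel if the read fails.
func getFileAndForget(path string, fileData chan []byte) {
	go func() {
		if data, err := readFile(path); err != nil {
			l4g.Error(err.Error())
			fileData <- nil
		} else {
			fileData <- data
		}
	}()
}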
func readFile(path string) ([]byte, *model.AppError) {
	if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 {
		var auth aws.Auth
		auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId
		auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey

		s := s3.New(auth, awsRegion())
		bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket)

		// try to get the file from S3 with some basic retry logic
		tries := 0
		for {
			tries++

			f, err := bucket.Get(path)

			if f != nil {
				return f, nil
			} else if tries >= 3 {
				return nil, model.NewAppError("readFile", "Unable to get file from S3", "path="+path+", err="+err.Error())
			}
			time.Sleep(3000 * time.Millisecond)
		}
	} else if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_LOCAL {
		if f, err := ioutil.ReadFile(utils.Cfg.FileSettings.Directory + path); err != nil {
			return nil, model.NewAppError("readFile", "Encountered an error reading from local server storage", err.Error())
		} else {
			return f, nil
		}
	} else {
		return nil, model.NewAppError("readFile", "File storage not configured properly. Please configure for either S3 or local server file storage.", "")
	}
}
func WriteFile(f []byte, path string) *model.AppError {
	if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 {
		var auth aws.Auth
		auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId
		auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey

		s := s3.New(auth, awsRegion())
		bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket)

		ext := filepath.Ext(path)

		var err error
		if model.IsFileExtImage(ext) {
			options := s3.Options{}
			err = bucket.Put(path, f, model.GetImageMimeType(ext), s3.Private, options)
		} else {
			options := s3.Options{}
			err = bucket.Put(path, f, "binary/octet-stream", s3.Private, options)
		}

		if err != nil {
			return model.NewLocAppError("WriteFile", "api.file.write_file.s3.app_error", nil, err.Error())
		}
	} else if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_LOCAL {
		if err := WriteFileLocally(f, utils.Cfg.FileSettings.Directory+path); err != nil {
			return err
		}
	} else {
		return model.NewLocAppError("WriteFile", "api.file.write_file.configured.app_error", nil, "")
	}

	return nil
}
func ReadFile(path string) ([]byte, *model.AppError) {
	if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 {
		var auth aws.Auth
		auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId
		auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey

		s := s3.New(auth, awsRegion())
		bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket)

		// try to get the file from S3 with some basic retry logic
		tries := 0
		for {
			tries++

			f, err := bucket.Get(path)

			if f != nil {
				return f, nil
			} else if tries >= 3 {
				return nil, model.NewLocAppError("ReadFile", "api.file.read_file.get.app_error", nil, "path="+path+", err="+err.Error())
			}
			time.Sleep(3000 * time.Millisecond)
		}
	} else if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_LOCAL {
		if f, err := ioutil.ReadFile(utils.Cfg.FileSettings.Directory + path); err != nil {
			return nil, model.NewLocAppError("ReadFile", "api.file.read_file.reading_local.app_error", nil, err.Error())
		} else {
			return f, nil
		}
	} else {
		return nil, model.NewLocAppError("ReadFile", "api.file.read_file.configured.app_error", nil, "")
	}
}
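The S3 read path above retries up to three times with a flat three-second sleep rather than failing on the first miss. A standalone sketch of that same retry-with-fixed-sleep pattern, with invented names and a fake failing fetch purely for demonstration:

package main

import (
	"errors"
	"fmt"
	"time"
)

// withRetry runs fn up to attempts times, sleeping a fixed delay between
// failed attempts, and returns the last error if every attempt fails.
func withRetry(attempts int, delay time.Duration, fn func() ([]byte, error)) ([]byte, error) {
	var lastErr error
	for i := 0; i < attempts; i++ {
		if data, err := fn(); err == nil {
			return data, nil
		} else {
			lastErr = err
		}
		if i < attempts-1 {
			time.Sleep(delay)
		}
	}
	return nil, lastErr
}

func main() {
	_, err := withRetry(3, 3*time.Second, func() ([]byte, error) {
		return nil, errors.New("simulated S3 miss")
	})
	fmt.Println(err)
}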
func readFile(path string) ([]byte, *model.AppError) {
	if utils.IsS3Configured() && !utils.Cfg.ServiceSettings.UseLocalStorage {
		var auth aws.Auth
		auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId
		auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey

		s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region])
		bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket)

		// try to get the file from S3 with some basic retry logic
		tries := 0
		for {
			tries++

			f, err := bucket.Get(path)

			if f != nil {
				return f, nil
			} else if tries >= 3 {
				return nil, model.NewAppError("readFile", "Unable to get file from S3", "path="+path+", err="+err.Error())
			}
			time.Sleep(3000 * time.Millisecond)
		}
	} else if utils.Cfg.ServiceSettings.UseLocalStorage && len(utils.Cfg.ServiceSettings.StorageDirectory) > 0 {
		if f, err := ioutil.ReadFile(utils.Cfg.ServiceSettings.StorageDirectory + path); err != nil {
			return nil, model.NewAppError("readFile", "Encountered an error reading from local server storage", err.Error())
		} else {
			return f, nil
		}
	} else {
		return nil, model.NewAppError("readFile", "File storage not configured properly. Please configure for either S3 or local server file storage.", "")
	}
}
func getProfileImage(c *Context, w http.ResponseWriter, r *http.Request) { params := mux.Vars(r) id := params["id"] if result := <-Srv.Store.User().Get(id); result.Err != nil { c.Err = result.Err return } else { var img []byte var err *model.AppError if !utils.IsS3Configured() { img, err = createProfileImage(result.Data.(*model.User).Username, id) if err != nil { c.Err = err return } } else { var auth aws.Auth auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region]) bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket) path := "teams/" + c.Session.TeamId + "/users/" + id + "/profile.png" if data, getErr := bucket.Get(path); getErr != nil { img, err = createProfileImage(result.Data.(*model.User).Username, id) if err != nil { c.Err = err return } options := s3.Options{} if err := bucket.Put(path, img, "image", s3.Private, options); err != nil { c.Err = model.NewAppError("getImage", "Couldn't upload default profile image", err.Error()) return } } else { img = data } } if c.Session.UserId == id { w.Header().Set("Cache-Control", "max-age=300, public") // 5 mins } else { w.Header().Set("Cache-Control", "max-age=86400, public") // 24 hrs } w.Write(img) } }
// Sign SES request as dictated by Amazon's Version 3 signature method.
func sign(auth *aws.Auth, method string, headers map[string][]string) string {
	accessKey, secretKey, _ := auth.Credentials()
	date := time.Now().UTC().Format(AMZ_DATE_STYLE)

	h := hmac.New(sha256.New, []byte(secretKey))
	h.Write([]byte(date))
	signature := base64.StdEncoding.EncodeToString(h.Sum(nil))
	authHeader := fmt.Sprintf("AWS3-HTTPS AWSAccessKeyId=%s, Algorithm=HmacSHA256, Signature=%s", accessKey, signature)

	headers["Date"] = []string{date}
	headers["X-Amzn-Authorization"] = []string{authHeader}

	return accessKey
}
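The AWS3-HTTPS scheme signs only the Date header value. A self-contained sketch of the same idea, with the date layout written out explicitly; AMZ_DATE_STYLE is defined elsewhere in that package, and the RFC 1123 layout used here is an assumption, as are the example credentials:

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"time"
)

// sesAuthHeader computes the Date value and the matching X-Amzn-Authorization
// header: an HMAC-SHA256 of the date string, base64-encoded.
func sesAuthHeader(accessKey, secretKey string, now time.Time) (date, header string) {
	date = now.UTC().Format(time.RFC1123) // assumed layout for AMZ_DATE_STYLE
	mac := hmac.New(sha256.New, []byte(secretKey))
	mac.Write([]byte(date))
	sig := base64.StdEncoding.EncodeToString(mac.Sum(nil))
	header = fmt.Sprintf("AWS3-HTTPS AWSAccessKeyId=%s, Algorithm=HmacSHA256, Signature=%s", accessKey, sig)
	return date, header
}

func main() {
	d, h := sesAuthHeader("AKIDEXAMPLE", "secret", time.Now())
	fmt.Println("Date:", d)
	fmt.Println("X-Amzn-Authorization:", h)
}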
// ----------------------------------------------------------------------------
// Mechanical Turk signing (http://goo.gl/wrzfn)

func sign(auth *aws.Auth, service, method, timestamp string, params map[string]string) {
	accessKey, secretKey, _ := auth.Credentials()
	params["AWSAccessKeyId"] = accessKey

	payload := service + method + timestamp
	hash := hmac.New(sha1.New, []byte(secretKey))
	hash.Write([]byte(payload))
	signature := make([]byte, b64.EncodedLen(hash.Size()))
	b64.Encode(signature, hash.Sum(nil))

	params["Signature"] = string(signature)
}
func TestUserCreateImage(t *testing.T) {
	th := Setup()
	Client := th.CreateClient()

	b, err := createProfileImage("Corey Hulen", "eo1zkdr96pdj98pjmq8zy35wba")
	if err != nil {
		t.Fatal(err)
	}

	rdr := bytes.NewReader(b)
	img, _, err2 := image.Decode(rdr)
	if err2 != nil {
		t.Fatal(err2)
	}

	colorful := color.RGBA{116, 49, 196, 255}

	if img.At(1, 1) != colorful {
		t.Fatal("Failed to create correct color")
	}

	team := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN}
	team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team)

	user := &model.User{Email: strings.ToLower(model.NewId()) + "*****@*****.**", Nickname: "Corey Hulen", Password: "******"}
	user = Client.Must(Client.CreateUser(user, "")).Data.(*model.User)
	LinkUserToTeam(user, team)
	store.Must(Srv.Store.User().VerifyEmail(user.Id))

	Client.Login(user.Email, "pwd")

	Client.DoApiGet("/users/"+user.Id+"/image", "", "")

	if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 {
		var auth aws.Auth
		auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId
		auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey

		s := s3.New(auth, aws.Regions[utils.Cfg.FileSettings.AmazonS3Region])
		bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket)

		if err := bucket.Del("/users/" + user.Id + "/profile.png"); err != nil {
			t.Fatal(err)
		}
	} else {
		path := utils.Cfg.FileSettings.Directory + "/users/" + user.Id + "/profile.png"
		if err := os.Remove(path); err != nil {
			t.Fatal("Couldn't remove file at " + path)
		}
	}
}
func TestUserCreateImage(t *testing.T) {
	Setup()

	b, err := createProfileImage("Corey Hulen", "eo1zkdr96pdj98pjmq8zy35wba")
	if err != nil {
		t.Fatal(err)
	}

	rdr := bytes.NewReader(b)
	img, _, err2 := image.Decode(rdr)
	if err2 != nil {
		t.Fatal(err2)
	}

	colorful := color.RGBA{116, 49, 196, 255}

	if img.At(1, 1) != colorful {
		t.Fatal("Failed to create correct color")
	}

	team := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN}
	team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team)

	user := &model.User{TeamId: team.Id, Email: strings.ToLower(model.NewId()) + "*****@*****.**", Nickname: "Corey Hulen", Password: "******"}
	user = Client.Must(Client.CreateUser(user, "")).Data.(*model.User)
	store.Must(Srv.Store.User().VerifyEmail(user.Id))

	Client.LoginByEmail(team.Name, user.Email, "pwd")

	Client.DoGet("/users/"+user.Id+"/image", "", "")

	if utils.IsS3Configured() && !utils.Cfg.ServiceSettings.UseLocalStorage {
		var auth aws.Auth
		auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId
		auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey

		s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region])
		bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket)

		if err := bucket.Del("teams/" + user.TeamId + "/users/" + user.Id + "/profile.png"); err != nil {
			t.Fatal(err)
		}
	} else {
		path := utils.Cfg.ServiceSettings.StorageDirectory + "teams/" + user.TeamId + "/users/" + user.Id + "/profile.png"
		if err := os.Remove(path); err != nil {
			t.Fatal("Couldn't remove file at " + path)
		}
	}
}
func cleanupTestFile(fullFilename, teamId, channelId, userId string) error { filenames := strings.Split(fullFilename, "/") filename := filenames[len(filenames)-2] + "/" + filenames[len(filenames)-1] fileId := strings.Split(filename, ".")[0] if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 { // perform clean-up on s3 var auth aws.Auth auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.FileSettings.AmazonS3Region]) bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket) if err := bucket.Del("teams/" + teamId + "/channels/" + channelId + "/users/" + userId + "/" + filename); err != nil { return err } if err := bucket.Del("teams/" + teamId + "/channels/" + channelId + "/users/" + userId + "/" + fileId + "_thumb.jpg"); err != nil { return err } if err := bucket.Del("teams/" + teamId + "/channels/" + channelId + "/users/" + userId + "/" + fileId + "_preview.jpg"); err != nil { return err } } else { path := utils.Cfg.FileSettings.Directory + "teams/" + teamId + "/channels/" + channelId + "/users/" + userId + "/" + filename if err := os.Remove(path); err != nil { return fmt.Errorf("Couldn't remove file at " + path) } path = utils.Cfg.FileSettings.Directory + "teams/" + teamId + "/channels/" + channelId + "/users/" + userId + "/" + fileId + "_thumb.jpg" if err := os.Remove(path); err != nil { return fmt.Errorf("Couldn't remove file at " + path) } path = utils.Cfg.FileSettings.Directory + "teams/" + teamId + "/channels/" + channelId + "/users/" + userId + "/" + fileId + "_preview.jpg" if err := os.Remove(path); err != nil { return fmt.Errorf("Couldn't remove file at " + path) } } return nil }
func sign(auth aws.Auth, method, path string, params map[string]string, host string) {
	params["AWSAccessKeyId"] = auth.AccessKey
	if auth.Token() != "" {
		params["SecurityToken"] = auth.Token()
	}
	params["SignatureVersion"] = "2"
	params["SignatureMethod"] = "HmacSHA256"

	var sarray []string
	for k, v := range params {
		sarray = append(sarray, aws.Encode(k)+"="+aws.Encode(v))
	}
	sort.StringSlice(sarray).Sort()
	joined := strings.Join(sarray, "&")
	payload := method + "\n" + host + "\n" + path + "\n" + joined
	hash := hmac.New(sha256.New, []byte(auth.SecretKey))
	hash.Write([]byte(payload))
	signature := make([]byte, b64.EncodedLen(hash.Size()))
	b64.Encode(signature, hash.Sum(nil))

	params["Signature"] = string(signature)
}
func writeFile(f []byte, path string) *model.AppError {
	if utils.IsS3Configured() && !utils.Cfg.ServiceSettings.UseLocalStorage {
		var auth aws.Auth
		auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId
		auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey

		s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region])
		bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket)

		ext := filepath.Ext(path)

		var err error
		if model.IsFileExtImage(ext) {
			options := s3.Options{}
			err = bucket.Put(path, f, model.GetImageMimeType(ext), s3.Private, options)
		} else {
			options := s3.Options{}
			err = bucket.Put(path, f, "binary/octet-stream", s3.Private, options)
		}

		if err != nil {
			return model.NewAppError("writeFile", "Encountered an error writing to S3", err.Error())
		}
	} else if utils.Cfg.ServiceSettings.UseLocalStorage && len(utils.Cfg.ServiceSettings.StorageDirectory) > 0 {
		if err := os.MkdirAll(filepath.Dir(utils.Cfg.ServiceSettings.StorageDirectory+path), 0774); err != nil {
			return model.NewAppError("writeFile", "Encountered an error creating the directory for the new file", err.Error())
		}

		if err := ioutil.WriteFile(utils.Cfg.ServiceSettings.StorageDirectory+path, f, 0644); err != nil {
			return model.NewAppError("writeFile", "Encountered an error writing to local server storage", err.Error())
		}
	} else {
		return model.NewAppError("writeFile", "File storage not configured properly. Please configure for either S3 or local server file storage.", "")
	}

	return nil
}
func sign(auth *aws.Auth, method, path string, params url.Values, headers http.Header) {
	var host string
	for k, v := range headers {
		k = strings.ToLower(k)
		switch k {
		case "host":
			host = v[0]
		}
	}

	accessKey, secretKey, token := auth.Credentials()
	// set up some defaults used for signing the request
	params["AWSAccessKeyId"] = []string{accessKey}
	params["SignatureVersion"] = []string{"2"}
	params["SignatureMethod"] = []string{"HmacSHA256"}
	if token != "" {
		params["SecurityToken"] = []string{token}
	}

	// join up all the incoming params
	var sarray []string
	for k, v := range params {
		sarray = append(sarray, aws.Encode(k)+"="+aws.Encode(v[0]))
	}
	sort.StringSlice(sarray).Sort()
	joined := strings.Join(sarray, "&")

	// create the payload, sign it and create the signature
	payload := strings.Join([]string{method, host, "/", joined}, "\n")
	hash := hmac.New(sha256.New, []byte(secretKey))
	hash.Write([]byte(payload))
	signature := make([]byte, b64.EncodedLen(hash.Size()))
	b64.Encode(signature, hash.Sum(nil))

	// add the signature to the outgoing params
	params["Signature"] = []string{string(signature)}
}
func sign(auth *aws.Auth, method, path string, params map[string]string, host string) {
	accessKey, secretKey, _ := auth.Credentials()
	params["AWSAccessKeyId"] = accessKey
	params["SignatureVersion"] = "2"
	params["SignatureMethod"] = "HmacSHA256"

	var keys, sarray []string
	for k := range params {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		sarray = append(sarray, aws.Encode(k)+"="+aws.Encode(params[k]))
	}
	joined := strings.Join(sarray, "&")
	payload := method + "\n" + host + "\n" + path + "\n" + joined
	hash := hmac.New(sha256.New, []byte(secretKey))
	hash.Write([]byte(payload))
	signature := make([]byte, b64.EncodedLen(hash.Size()))
	b64.Encode(signature, hash.Sum(nil))

	params["Signature"] = string(signature)
}
func TestGetFile(t *testing.T) { th := Setup().InitBasic() Client := th.BasicClient team := th.BasicTeam user := th.BasicUser channel := th.BasicChannel if utils.Cfg.FileSettings.DriverName != "" { body := &bytes.Buffer{} writer := multipart.NewWriter(body) part, err := writer.CreateFormFile("files", "test.png") if err != nil { t.Fatal(err) } path := utils.FindDir("tests") file, err := os.Open(path + "/test.png") if err != nil { t.Fatal(err) } defer file.Close() _, err = io.Copy(part, file) if err != nil { t.Fatal(err) } field, err := writer.CreateFormField("channel_id") if err != nil { t.Fatal(err) } _, err = field.Write([]byte(channel.Id)) if err != nil { t.Fatal(err) } err = writer.Close() if err != nil { t.Fatal(err) } resp, upErr := Client.UploadPostAttachment(body.Bytes(), writer.FormDataContentType()) if upErr != nil { t.Fatal(upErr) } filenames := resp.Data.(*model.FileUploadResponse).Filenames // wait a bit for files to ready time.Sleep(5 * time.Second) if _, downErr := Client.GetFile(filenames[0], false); downErr != nil { t.Fatal(downErr) } if resp, downErr := Client.GetFileInfo(filenames[0]); downErr != nil { t.Fatal(downErr) } else { info := resp.Data.(*model.FileInfo) if info.Size == 0 { t.Fatal("No file size returned") } } if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 { var auth aws.Auth auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.FileSettings.AmazonS3Region]) bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket) filenames := strings.Split(resp.Data.(*model.FileUploadResponse).Filenames[0], "/") filename := filenames[len(filenames)-2] + "/" + filenames[len(filenames)-1] fileId := strings.Split(filename, ".")[0] err = bucket.Del("teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + filename) if err != nil { t.Fatal(err) } err = bucket.Del("teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + fileId + "_thumb.jpg") if err != nil { t.Fatal(err) } err = bucket.Del("teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + fileId + "_preview.jpg") if err != nil { t.Fatal(err) } } else { filenames := strings.Split(resp.Data.(*model.FileUploadResponse).Filenames[0], "/") filename := filenames[len(filenames)-2] + "/" + filenames[len(filenames)-1] fileId := strings.Split(filename, ".")[0] path := utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + filename if err := os.Remove(path); err != nil { t.Fatal("Couldn't remove file at " + path) } path = utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + fileId + "_thumb.jpg" if err := os.Remove(path); err != nil { t.Fatal("Couldn't remove file at " + path) } path = utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + fileId + "_preview.jpg" if err := os.Remove(path); err != nil { t.Fatal("Couldn't remove file at " + path) } } } else { if _, downErr := Client.GetFile("/files/get/yxebdmbz5pgupx7q6ez88rw11a/n3btzxu9hbnapqk36iwaxkjxhc/junk.jpg", false); downErr.StatusCode != http.StatusNotImplemented { t.Fatal("Status code should have been 501 - Not Implemented") } } }
func TestUploadFile(t *testing.T) { th := Setup().InitBasic() Client := th.BasicClient team := th.BasicTeam user := th.BasicUser channel := th.BasicChannel body := &bytes.Buffer{} writer := multipart.NewWriter(body) part, err := writer.CreateFormFile("files", "../test.png") if err != nil { t.Fatal(err) } path := utils.FindDir("tests") file, err := os.Open(path + "/test.png") if err != nil { t.Fatal(err) } defer file.Close() _, err = io.Copy(part, file) if err != nil { t.Fatal(err) } field, err := writer.CreateFormField("channel_id") if err != nil { t.Fatal(err) } _, err = field.Write([]byte(channel.Id)) if err != nil { t.Fatal(err) } err = writer.Close() if err != nil { t.Fatal(err) } resp, appErr := Client.UploadPostAttachment(body.Bytes(), writer.FormDataContentType()) if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 { if appErr != nil { t.Fatal(appErr) } filenames := strings.Split(resp.Data.(*model.FileUploadResponse).Filenames[0], "/") filename := filenames[len(filenames)-2] + "/" + filenames[len(filenames)-1] if strings.Contains(filename, "../") { t.Fatal("relative path should have been sanitized out") } fileId := strings.Split(filename, ".")[0] var auth aws.Auth auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.FileSettings.AmazonS3Region]) bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket) // wait a bit for files to ready time.Sleep(5 * time.Second) err = bucket.Del("teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + filename) if err != nil { t.Fatal(err) } err = bucket.Del("teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + fileId + "_thumb.jpg") if err != nil { t.Fatal(err) } err = bucket.Del("teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + fileId + "_preview.jpg") if err != nil { t.Fatal(err) } } else if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_LOCAL { filenames := strings.Split(resp.Data.(*model.FileUploadResponse).Filenames[0], "/") filename := filenames[len(filenames)-2] + "/" + filenames[len(filenames)-1] if strings.Contains(filename, "../") { t.Fatal("relative path should have been sanitized out") } fileId := strings.Split(filename, ".")[0] // wait a bit for files to ready time.Sleep(5 * time.Second) path := utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + filename if err := os.Remove(path); err != nil { t.Fatal("Couldn't remove file at " + path) } path = utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + fileId + "_thumb.jpg" if err := os.Remove(path); err != nil { t.Fatal("Couldn't remove file at " + path) } path = utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel.Id + "/users/" + user.Id + "/" + fileId + "_preview.jpg" if err := os.Remove(path); err != nil { t.Fatal("Couldn't remove file at " + path) } } else { if appErr == nil { t.Fatal("S3 and local storage not configured, should have failed") } } }
func sign(auth *aws.Auth, method, canonicalPath string, params, headers map[string][]string) { accessKey, secretKey, token := auth.Credentials() if token != "" { headers["X-Amz-Security-Token"] = []string{token} } var md5, ctype, date, xamz string var xamzDate bool var sarray keySortableTupleList for k, v := range headers { k = strings.ToLower(k) switch k { case "content-md5": md5 = v[0] case "content-type": ctype = v[0] case "date": if !xamzDate { date = v[0] } default: if strings.HasPrefix(k, "x-amz-") { vall := strings.Join(v, ",") sarray = append(sarray, keySortableTuple{k, k + ":" + vall}) if k == "x-amz-date" { xamzDate = true date = "" } } } } if len(sarray) > 0 { sort.Sort(sarray) xamz = strings.Join(sarray.StringSlice(), "\n") + "\n" } expires := false if v, ok := params["Expires"]; ok { // Query string request authentication alternative. expires = true date = v[0] params["AWSAccessKeyId"] = []string{accessKey} } sarray = sarray[0:0] for k, v := range params { if s3ParamsToSign[k] { for _, vi := range v { if vi == "" { sarray = append(sarray, keySortableTuple{k, k}) } else { // "When signing you do not encode these values." sarray = append(sarray, keySortableTuple{k, k + "=" + vi}) } } } } if len(sarray) > 0 { sort.Sort(sarray) canonicalPath = canonicalPath + "?" + strings.Join(sarray.StringSlice(), "&") } payload := method + "\n" + md5 + "\n" + ctype + "\n" + date + "\n" + xamz + canonicalPath hash := hmac.New(sha1.New, []byte(secretKey)) hash.Write([]byte(payload)) signature := make([]byte, b64.EncodedLen(hash.Size())) b64.Encode(signature, hash.Sum(nil)) if expires { params["Signature"] = []string{string(signature)} } else { headers["Authorization"] = []string{"AWS " + accessKey + ":" + string(signature)} } if debug { log.Printf("Signature payload: %q", payload) log.Printf("Signature: %q", signature) } }
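For a request with no Content-MD5, no Content-Type, no x-amz-* headers, and no signed query parameters, the string-to-sign above collapses to just the method, two blank lines, the date, and the canonical path. A self-contained sketch of that simple GET case; the bucket, key, and credentials are invented, and the date layout (http.TimeFormat) is an assumption about what the Date header would carry:

package main

import (
	"crypto/hmac"
	"crypto/sha1"
	"encoding/base64"
	"fmt"
	"net/http"
	"time"
)

// s3LegacyAuth signs "GET\n\n\n<date>\n<canonicalPath>" with HMAC-SHA1 and
// returns the Date header value plus the "AWS <key>:<sig>" Authorization value.
func s3LegacyAuth(accessKey, secretKey, canonicalPath string, now time.Time) (date, authorization string) {
	date = now.UTC().Format(http.TimeFormat)
	payload := "GET" + "\n" + "" + "\n" + "" + "\n" + date + "\n" + canonicalPath
	mac := hmac.New(sha1.New, []byte(secretKey))
	mac.Write([]byte(payload))
	sig := base64.StdEncoding.EncodeToString(mac.Sum(nil))
	return date, "AWS " + accessKey + ":" + sig
}

func main() {
	d, a := s3LegacyAuth("AKIDEXAMPLE", "secret", "/examplebucket/photo.png", time.Now())
	fmt.Println("Date:", d)
	fmt.Println("Authorization:", a)
}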
func TestGetPublicLink(t *testing.T) { Setup() team := &model.Team{Name: "Name", Domain: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN} team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team) user1 := &model.User{TeamId: team.Id, Email: model.NewId() + "*****@*****.**", FullName: "Corey Hulen", Password: "******"} user1 = Client.Must(Client.CreateUser(user1, "")).Data.(*model.User) Srv.Store.User().VerifyEmail(user1.Id) user2 := &model.User{TeamId: team.Id, Email: model.NewId() + "*****@*****.**", FullName: "Corey Hulen", Password: "******"} user2 = Client.Must(Client.CreateUser(user2, "")).Data.(*model.User) Srv.Store.User().VerifyEmail(user2.Id) Client.LoginByEmail(team.Domain, user1.Email, "pwd") channel1 := &model.Channel{DisplayName: "Test API Name", Name: "a" + model.NewId() + "a", Type: model.CHANNEL_OPEN, TeamId: team.Id} channel1 = Client.Must(Client.CreateChannel(channel1)).Data.(*model.Channel) if utils.IsS3Configured() { body := &bytes.Buffer{} writer := multipart.NewWriter(body) part, err := writer.CreateFormFile("files", "test.png") if err != nil { t.Fatal(err) } path := utils.FindDir("web/static/images") file, err := os.Open(path + "/test.png") if err != nil { t.Fatal(err) } defer file.Close() _, err = io.Copy(part, file) if err != nil { t.Fatal(err) } field, err := writer.CreateFormField("channel_id") if err != nil { t.Fatal(err) } _, err = field.Write([]byte(channel1.Id)) if err != nil { t.Fatal(err) } err = writer.Close() if err != nil { t.Fatal(err) } resp, upErr := Client.UploadFile("/files/upload", body.Bytes(), writer.FormDataContentType()) if upErr != nil { t.Fatal(upErr) } filenames := resp.Data.(*model.FileUploadResponse).Filenames post1 := &model.Post{ChannelId: channel1.Id, Message: "a" + model.NewId() + "a", Filenames: filenames} rpost1, postErr := Client.CreatePost(post1) if postErr != nil { t.Fatal(postErr) } if rpost1.Data.(*model.Post).Filenames[0] != filenames[0] { t.Fatal("filenames don't match") } // wait a bit for files to ready time.Sleep(5 * time.Second) data := make(map[string]string) data["filename"] = filenames[0] if _, err := Client.GetPublicLink(data); err != nil { t.Fatal(err) } data["filename"] = "junk" if _, err := Client.GetPublicLink(data); err == nil { t.Fatal("Should have errored - bad file path") } Client.LoginByEmail(team.Domain, user2.Email, "pwd") data["filename"] = filenames[0] if _, err := Client.GetPublicLink(data); err == nil { t.Fatal("should have errored, user not member of channel") } // perform clean-up on s3 var auth aws.Auth auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region]) bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket) fileId := strings.Split(filenames[0], ".")[0] if err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + rpost1.Data.(*model.Post).UserId + "/" + filenames[0]); err != nil { t.Fatal(err) } if err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + rpost1.Data.(*model.Post).UserId + "/" + fileId + "_thumb.jpg"); err != nil { t.Fatal(err) } if err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + rpost1.Data.(*model.Post).UserId + "/" + fileId + "_preview.png"); err != nil { t.Fatal(err) } } else { data := make(map[string]string) if _, err := Client.GetPublicLink(data); err.StatusCode != http.StatusNotImplemented { t.Fatal("Status code should have been 501 - Not Implemented") } } }
func TestUploadFile(t *testing.T) { Setup() team := &model.Team{Name: "Name", Domain: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN} team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team) user1 := &model.User{TeamId: team.Id, Email: model.NewId() + "*****@*****.**", FullName: "Corey Hulen", Password: "******"} user1 = Client.Must(Client.CreateUser(user1, "")).Data.(*model.User) Srv.Store.User().VerifyEmail(user1.Id) Client.LoginByEmail(team.Domain, user1.Email, "pwd") channel1 := &model.Channel{DisplayName: "Test API Name", Name: "a" + model.NewId() + "a", Type: model.CHANNEL_OPEN, TeamId: team.Id} channel1 = Client.Must(Client.CreateChannel(channel1)).Data.(*model.Channel) body := &bytes.Buffer{} writer := multipart.NewWriter(body) part, err := writer.CreateFormFile("files", "test.png") if err != nil { t.Fatal(err) } path := utils.FindDir("web/static/images") file, err := os.Open(path + "/test.png") defer file.Close() _, err = io.Copy(part, file) if err != nil { t.Fatal(err) } field, err := writer.CreateFormField("channel_id") if err != nil { t.Fatal(err) } _, err = field.Write([]byte(channel1.Id)) if err != nil { t.Fatal(err) } err = writer.Close() if err != nil { t.Fatal(err) } resp, appErr := Client.UploadFile("/files/upload", body.Bytes(), writer.FormDataContentType()) if utils.IsS3Configured() { if appErr != nil { t.Fatal(appErr) } filenames := resp.Data.(*model.FileUploadResponse).Filenames var auth aws.Auth auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region]) bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket) fileId := strings.Split(filenames[0], ".")[0] // wait a bit for files to ready time.Sleep(5 * time.Second) err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + filenames[0]) if err != nil { t.Fatal(err) } err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + fileId + "_thumb.jpg") if err != nil { t.Fatal(err) } err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + fileId + "_preview.png") if err != nil { t.Fatal(err) } } else { if appErr == nil { t.Fatal("S3 not configured, should have failed") } } }
func TestGetFile(t *testing.T) {
	Setup()

	team := &model.Team{Name: "Name", Domain: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN}
	team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team)

	user1 := &model.User{TeamId: team.Id, Email: model.NewId() + "*****@*****.**", FullName: "Corey Hulen", Password: "******"}
	user1 = Client.Must(Client.CreateUser(user1, "")).Data.(*model.User)
	Srv.Store.User().VerifyEmail(user1.Id)

	Client.LoginByEmail(team.Domain, user1.Email, "pwd")

	channel1 := &model.Channel{DisplayName: "Test API Name", Name: "a" + model.NewId() + "a", Type: model.CHANNEL_OPEN, TeamId: team.Id}
	channel1 = Client.Must(Client.CreateChannel(channel1)).Data.(*model.Channel)

	if utils.IsS3Configured() {
		body := &bytes.Buffer{}
		writer := multipart.NewWriter(body)

		part, err := writer.CreateFormFile("files", "test.png")
		if err != nil {
			t.Fatal(err)
		}

		path := utils.FindDir("web/static/images")
		file, err := os.Open(path + "/test.png")
		if err != nil {
			t.Fatal(err)
		}
		defer file.Close()

		_, err = io.Copy(part, file)
		if err != nil {
			t.Fatal(err)
		}

		field, err := writer.CreateFormField("channel_id")
		if err != nil {
			t.Fatal(err)
		}

		_, err = field.Write([]byte(channel1.Id))
		if err != nil {
			t.Fatal(err)
		}

		err = writer.Close()
		if err != nil {
			t.Fatal(err)
		}

		resp, upErr := Client.UploadFile("/files/upload", body.Bytes(), writer.FormDataContentType())
		if upErr != nil {
			t.Fatal(upErr)
		}

		filenames := resp.Data.(*model.FileUploadResponse).Filenames

		// wait a bit for files to ready
		time.Sleep(5 * time.Second)

		if _, downErr := Client.GetFile(filenames[0], true); downErr != nil {
			t.Fatal("file get failed")
		}

		team2 := &model.Team{Name: "Name", Domain: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN}
		team2 = Client.Must(Client.CreateTeam(team2)).Data.(*model.Team)

		user2 := &model.User{TeamId: team2.Id, Email: model.NewId() + "*****@*****.**", FullName: "Corey Hulen", Password: "******"}
		user2 = Client.Must(Client.CreateUser(user2, "")).Data.(*model.User)
		Srv.Store.User().VerifyEmail(user2.Id)

		newProps := make(map[string]string)
		newProps["filename"] = filenames[0]
		newProps["time"] = fmt.Sprintf("%v", model.GetMillis())

		data := model.MapToJson(newProps)
		hash := model.HashPassword(fmt.Sprintf("%v:%v", data, utils.Cfg.ServiceSettings.PublicLinkSalt))

		Client.LoginByEmail(team2.Domain, user2.Email, "pwd")

		if _, downErr := Client.GetFile(filenames[0]+"?d="+url.QueryEscape(data)+"&h="+url.QueryEscape(hash)+"&t="+team.Id, true); downErr != nil {
			t.Fatal(downErr)
		}

		if _, downErr := Client.GetFile(filenames[0]+"?d="+url.QueryEscape(data)+"&h="+url.QueryEscape(hash), true); downErr == nil {
			t.Fatal("Should have errored - missing team id")
		}

		if _, downErr := Client.GetFile(filenames[0]+"?d="+url.QueryEscape(data)+"&h="+url.QueryEscape(hash)+"&t=junk", true); downErr == nil {
			t.Fatal("Should have errored - bad team id")
		}

		if _, downErr := Client.GetFile(filenames[0]+"?d="+url.QueryEscape(data)+"&h="+url.QueryEscape(hash)+"&t=12345678901234567890123456", true); downErr == nil {
			t.Fatal("Should have errored - bad team id")
		}

		if _, downErr := Client.GetFile(filenames[0]+"?d="+url.QueryEscape(data)+"&t="+team.Id, true); downErr == nil {
			t.Fatal("Should have errored - missing hash")
		}

		if _, downErr := Client.GetFile(filenames[0]+"?d="+url.QueryEscape(data)+"&h=junk&t="+team.Id, true); downErr == nil {
			t.Fatal("Should have errored - bad hash")
		}

		if _, downErr := Client.GetFile(filenames[0]+"?h="+url.QueryEscape(hash)+"&t="+team.Id, true); downErr == nil {
			t.Fatal("Should have errored - missing data")
		}

		if _, downErr := Client.GetFile(filenames[0]+"?d=junk&h="+url.QueryEscape(hash)+"&t="+team.Id, true); downErr == nil {
			t.Fatal("Should have errored - bad data")
		}

		if _, downErr := Client.GetFile(filenames[0], true); downErr == nil {
			t.Fatal("Should have errored - user not logged in and link not public")
		}

		var auth aws.Auth
		auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId
		auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey

		s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region])
		bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket)

		fileId := strings.Split(filenames[0], ".")[0]

		err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + filenames[0])
		if err != nil {
			t.Fatal(err)
		}

		err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + fileId + "_thumb.jpg")
		if err != nil {
			t.Fatal(err)
		}

		err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + fileId + "_preview.png")
		if err != nil {
			t.Fatal(err)
		}
	} else {
		if _, downErr := Client.GetFile("/files/get/yxebdmbz5pgupx7q6ez88rw11a/n3btzxu9hbnapqk36iwaxkjxhc/junk.jpg", false); downErr.StatusCode != http.StatusNotImplemented {
			t.Fatal("Status code should have been 501 - Not Implemented")
		}
	}
}
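The public-link checks above hinge on a salted hash of the serialized link properties: the client sends d (the JSON props, including a timestamp) and h (model.HashPassword over "<data>:<PublicLinkSalt>"), and the getFile handler later in this collection verifies it with model.ComparePassword and rejects links older than one week. A small sketch of generating such a query string, mirroring what the test does; the salt parameter is a placeholder for utils.Cfg.ServiceSettings.PublicLinkSalt:

// Sketch only: building the d/h/t query parameters the way TestGetFile does.
func publicLinkQuery(filename, teamId, publicLinkSalt string) string {
	props := map[string]string{
		"filename": filename,
		"time":     fmt.Sprintf("%v", model.GetMillis()),
	}
	data := model.MapToJson(props)
	hash := model.HashPassword(fmt.Sprintf("%v:%v", data, publicLinkSalt))
	return "?d=" + url.QueryEscape(data) + "&h=" + url.QueryEscape(hash) + "&t=" + teamId
}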
func TestUploadFile(t *testing.T) {
	Setup()

	team := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN}
	team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team)

	user1 := &model.User{TeamId: team.Id, Email: model.NewId() + "*****@*****.**", Nickname: "Corey Hulen", Password: "******"}
	user1 = Client.Must(Client.CreateUser(user1, "")).Data.(*model.User)
	store.Must(Srv.Store.User().VerifyEmail(user1.Id))

	Client.LoginByEmail(team.Name, user1.Email, "pwd")

	channel1 := &model.Channel{DisplayName: "Test API Name", Name: "a" + model.NewId() + "a", Type: model.CHANNEL_OPEN, TeamId: team.Id}
	channel1 = Client.Must(Client.CreateChannel(channel1)).Data.(*model.Channel)

	body := &bytes.Buffer{}
	writer := multipart.NewWriter(body)

	part, err := writer.CreateFormFile("files", "../test.png")
	if err != nil {
		t.Fatal(err)
	}

	path := utils.FindDir("web/static/images")
	file, err := os.Open(path + "/test.png")
	if err != nil {
		t.Fatal(err)
	}
	defer file.Close()

	_, err = io.Copy(part, file)
	if err != nil {
		t.Fatal(err)
	}

	field, err := writer.CreateFormField("channel_id")
	if err != nil {
		t.Fatal(err)
	}

	_, err = field.Write([]byte(channel1.Id))
	if err != nil {
		t.Fatal(err)
	}

	err = writer.Close()
	if err != nil {
		t.Fatal(err)
	}

	resp, appErr := Client.UploadFile("/files/upload", body.Bytes(), writer.FormDataContentType())

	if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 {
		if appErr != nil {
			t.Fatal(appErr)
		}

		filenames := strings.Split(resp.Data.(*model.FileUploadResponse).Filenames[0], "/")
		filename := filenames[len(filenames)-2] + "/" + filenames[len(filenames)-1]
		if strings.Contains(filename, "../") {
			t.Fatal("relative path should have been sanitized out")
		}
		fileId := strings.Split(filename, ".")[0]

		var auth aws.Auth
		auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId
		auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey

		s := s3.New(auth, aws.Regions[utils.Cfg.FileSettings.AmazonS3Region])
		bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket)

		// wait a bit for files to ready
		time.Sleep(5 * time.Second)

		err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + filename)
		if err != nil {
			t.Fatal(err)
		}

		err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + fileId + "_thumb.jpg")
		if err != nil {
			t.Fatal(err)
		}

		err = bucket.Del("teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + fileId + "_preview.jpg")
		if err != nil {
			t.Fatal(err)
		}
	} else if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_LOCAL {
		filenames := strings.Split(resp.Data.(*model.FileUploadResponse).Filenames[0], "/")
		filename := filenames[len(filenames)-2] + "/" + filenames[len(filenames)-1]
		if strings.Contains(filename, "../") {
			t.Fatal("relative path should have been sanitized out")
		}
		fileId := strings.Split(filename, ".")[0]

		// wait a bit for files to ready
		time.Sleep(5 * time.Second)

		path := utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + filename
		if err := os.Remove(path); err != nil {
			t.Fatal("Couldn't remove file at " + path)
		}

		path = utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + fileId + "_thumb.jpg"
		if err := os.Remove(path); err != nil {
			t.Fatal("Couldn't remove file at " + path)
		}

		path = utils.Cfg.FileSettings.Directory + "teams/" + team.Id + "/channels/" + channel1.Id + "/users/" + user1.Id + "/" + fileId + "_preview.jpg"
		if err := os.Remove(path); err != nil {
			t.Fatal("Couldn't remove file at " + path)
		}
	} else {
		if appErr == nil {
			t.Fatal("S3 and local storage not configured, should have failed")
		}
	}
}
func TestUserUploadProfileImage(t *testing.T) { Setup() team := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN} team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team) user := &model.User{TeamId: team.Id, Email: strings.ToLower(model.NewId()) + "*****@*****.**", Nickname: "Corey Hulen", Password: "******"} user = Client.Must(Client.CreateUser(user, "")).Data.(*model.User) store.Must(Srv.Store.User().VerifyEmail(user.Id)) if utils.IsS3Configured() || utils.Cfg.ServiceSettings.UseLocalStorage { body := &bytes.Buffer{} writer := multipart.NewWriter(body) if _, upErr := Client.UploadFile("/users/newimage", body.Bytes(), writer.FormDataContentType()); upErr == nil { t.Fatal("Should have errored") } Client.LoginByEmail(team.Name, user.Email, "pwd") if _, upErr := Client.UploadFile("/users/newimage", body.Bytes(), writer.FormDataContentType()); upErr == nil { t.Fatal("Should have errored") } part, err := writer.CreateFormFile("blargh", "test.png") if err != nil { t.Fatal(err) } path := utils.FindDir("web/static/images") file, err := os.Open(path + "/test.png") if err != nil { t.Fatal(err) } defer file.Close() _, err = io.Copy(part, file) if err != nil { t.Fatal(err) } if err := writer.Close(); err != nil { t.Fatal(err) } if _, upErr := Client.UploadFile("/users/newimage", body.Bytes(), writer.FormDataContentType()); upErr == nil { t.Fatal("Should have errored") } file2, err := os.Open(path + "/test.png") if err != nil { t.Fatal(err) } defer file2.Close() body = &bytes.Buffer{} writer = multipart.NewWriter(body) part, err = writer.CreateFormFile("image", "test.png") if err != nil { t.Fatal(err) } if _, err := io.Copy(part, file2); err != nil { t.Fatal(err) } if err := writer.Close(); err != nil { t.Fatal(err) } if _, upErr := Client.UploadFile("/users/newimage", body.Bytes(), writer.FormDataContentType()); upErr != nil { t.Fatal(upErr) } Client.DoGet("/users/"+user.Id+"/image", "", "") if utils.IsS3Configured() && !utils.Cfg.ServiceSettings.UseLocalStorage { var auth aws.Auth auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region]) bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket) if err := bucket.Del("teams/" + user.TeamId + "/users/" + user.Id + "/profile.png"); err != nil { t.Fatal(err) } } else { path := utils.Cfg.ServiceSettings.StorageDirectory + "teams/" + user.TeamId + "/users/" + user.Id + "/profile.png" if err := os.Remove(path); err != nil { t.Fatal("Couldn't remove file at " + path) } } } else { body := &bytes.Buffer{} writer := multipart.NewWriter(body) if _, upErr := Client.UploadFile("/users/newimage", body.Bytes(), writer.FormDataContentType()); upErr.StatusCode != http.StatusNotImplemented { t.Fatal("Should have failed with 501 - Not Implemented") } } }
func fireAndForgetHandleImages(filenames []string, fileData [][]byte, teamId, channelId, userId string) { go func() { var auth aws.Auth auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region]) bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket) dest := "teams/" + teamId + "/channels/" + channelId + "/users/" + userId + "/" for i, filename := range filenames { name := filename[:strings.LastIndex(filename, ".")] go func() { // Decode image bytes into Image object img, _, err := image.Decode(bytes.NewReader(fileData[i])) if err != nil { l4g.Error("Unable to decode image channelId=%v userId=%v filename=%v err=%v", channelId, userId, filename, err) return } // Decode image config imgConfig, _, err := image.DecodeConfig(bytes.NewReader(fileData[i])) if err != nil { l4g.Error("Unable to decode image config channelId=%v userId=%v filename=%v err=%v", channelId, userId, filename, err) return } // Create thumbnail go func() { var thumbnail image.Image if imgConfig.Width > int(utils.Cfg.ImageSettings.ThumbnailWidth) { thumbnail = resize.Resize(utils.Cfg.ImageSettings.ThumbnailWidth, utils.Cfg.ImageSettings.ThumbnailHeight, img, resize.NearestNeighbor) } else { thumbnail = img } buf := new(bytes.Buffer) err = jpeg.Encode(buf, thumbnail, &jpeg.Options{Quality: 90}) if err != nil { l4g.Error("Unable to encode image as jpeg channelId=%v userId=%v filename=%v err=%v", channelId, userId, filename, err) return } // Upload thumbnail to S3 options := s3.Options{} err = bucket.Put(dest+name+"_thumb.jpg", buf.Bytes(), "image/jpeg", s3.Private, options) if err != nil { l4g.Error("Unable to upload thumbnail to S3 channelId=%v userId=%v filename=%v err=%v", channelId, userId, filename, err) return } }() // Create preview go func() { var preview image.Image if imgConfig.Width > int(utils.Cfg.ImageSettings.PreviewWidth) { preview = resize.Resize(utils.Cfg.ImageSettings.PreviewWidth, utils.Cfg.ImageSettings.PreviewHeight, img, resize.NearestNeighbor) } else { preview = img } buf := new(bytes.Buffer) err = jpeg.Encode(buf, preview, &jpeg.Options{Quality: 90}) //err = png.Encode(buf, preview) if err != nil { l4g.Error("Unable to encode image as preview jpg channelId=%v userId=%v filename=%v err=%v", channelId, userId, filename, err) return } // Upload preview to S3 options := s3.Options{} err = bucket.Put(dest+name+"_preview.jpg", buf.Bytes(), "image/jpeg", s3.Private, options) if err != nil { l4g.Error("Unable to upload preview to S3 channelId=%v userId=%v filename=%v err=%v", channelId, userId, filename, err) return } }() }() } }() }
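The thumbnail and preview goroutines above decode the upload once, optionally downscale it with github.com/nfnt/resize, and re-encode it as JPEG before pushing it to S3. A compact standalone sketch of just the resize-and-encode step, reading and writing local files instead of a bucket; the file names and the 120-pixel width are invented for the example:

package main

import (
	"bytes"
	"image"
	"image/jpeg"
	_ "image/png" // register the PNG decoder for image.Decode
	"io/ioutil"
	"log"

	"github.com/nfnt/resize"
)

func main() {
	data, err := ioutil.ReadFile("test.png") // hypothetical input file
	if err != nil {
		log.Fatal(err)
	}

	img, _, err := image.Decode(bytes.NewReader(data))
	if err != nil {
		log.Fatal(err)
	}

	// Downscale to a 120px-wide thumbnail; height 0 preserves the aspect ratio,
	// unlike the fixed width/height pair used in the handler above.
	thumb := resize.Resize(120, 0, img, resize.NearestNeighbor)

	buf := new(bytes.Buffer)
	if err := jpeg.Encode(buf, thumb, &jpeg.Options{Quality: 90}); err != nil {
		log.Fatal(err)
	}
	if err := ioutil.WriteFile("test_thumb.jpg", buf.Bytes(), 0644); err != nil {
		log.Fatal(err)
	}
}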
func getFile(c *Context, w http.ResponseWriter, r *http.Request) { if !utils.IsS3Configured() { c.Err = model.NewAppError("getFile", "Unable to get file. Amazon S3 not configured. ", "") c.Err.StatusCode = http.StatusNotImplemented return } params := mux.Vars(r) channelId := params["channel_id"] if len(channelId) != 26 { c.SetInvalidParam("getFile", "channel_id") return } userId := params["user_id"] if len(userId) != 26 { c.SetInvalidParam("getFile", "user_id") return } filename := params["filename"] if len(filename) == 0 { c.SetInvalidParam("getFile", "filename") return } hash := r.URL.Query().Get("h") data := r.URL.Query().Get("d") teamId := r.URL.Query().Get("t") cchan := Srv.Store.Channel().CheckPermissionsTo(c.Session.TeamId, channelId, c.Session.UserId) var auth aws.Auth auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region]) bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket) path := "" if len(teamId) == 26 { path = "teams/" + teamId + "/channels/" + channelId + "/users/" + userId + "/" + filename } else { path = "teams/" + c.Session.TeamId + "/channels/" + channelId + "/users/" + userId + "/" + filename } fileData := make(chan []byte) asyncGetFile(bucket, path, fileData) if len(hash) > 0 && len(data) > 0 && len(teamId) == 26 { if !model.ComparePassword(hash, fmt.Sprintf("%v:%v", data, utils.Cfg.ServiceSettings.PublicLinkSalt)) { c.Err = model.NewAppError("getFile", "The public link does not appear to be valid", "") return } props := model.MapFromJson(strings.NewReader(data)) t, err := strconv.ParseInt(props["time"], 10, 64) if err != nil || model.GetMillis()-t > 1000*60*60*24*7 { // one week c.Err = model.NewAppError("getFile", "The public link has expired", "") return } } else if !c.HasPermissionsToChannel(cchan, "getFile") { return } f := <-fileData if f == nil { var f2 []byte tries := 0 for { time.Sleep(3000 * time.Millisecond) tries++ asyncGetFile(bucket, path, fileData) f2 = <-fileData if f2 != nil { w.Header().Set("Cache-Control", "max-age=2592000, public") w.Header().Set("Content-Length", strconv.Itoa(len(f2))) w.Write(f2) return } else if tries >= 2 { break } } c.Err = model.NewAppError("getFile", "Could not find file.", "url extenstion: "+path) c.Err.StatusCode = http.StatusNotFound return } w.Header().Set("Cache-Control", "max-age=2592000, public") w.Header().Set("Content-Length", strconv.Itoa(len(f))) w.Write(f) }
func uploadFile(c *Context, w http.ResponseWriter, r *http.Request) { if !utils.IsS3Configured() { c.Err = model.NewAppError("uploadFile", "Unable to upload file. Amazon S3 not configured. ", "") c.Err.StatusCode = http.StatusNotImplemented return } err := r.ParseMultipartForm(model.MAX_FILE_SIZE) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } var auth aws.Auth auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region]) bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket) m := r.MultipartForm props := m.Value if len(props["channel_id"]) == 0 { c.SetInvalidParam("uploadFile", "channel_id") return } channelId := props["channel_id"][0] if len(channelId) == 0 { c.SetInvalidParam("uploadFile", "channel_id") return } cchan := Srv.Store.Channel().CheckPermissionsTo(c.Session.TeamId, channelId, c.Session.UserId) files := m.File["files"] resStruct := &model.FileUploadResponse{ Filenames: []string{}} imageNameList := []string{} imageDataList := [][]byte{} if !c.HasPermissionsToChannel(cchan, "uploadFile") { return } for i, _ := range files { file, err := files[i].Open() defer file.Close() if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } buf := bytes.NewBuffer(nil) io.Copy(buf, file) ext := filepath.Ext(files[i].Filename) uid := model.NewId() path := "teams/" + c.Session.TeamId + "/channels/" + channelId + "/users/" + c.Session.UserId + "/" + uid + "/" + files[i].Filename if model.IsFileExtImage(ext) { options := s3.Options{} err = bucket.Put(path, buf.Bytes(), model.GetImageMimeType(ext), s3.Private, options) imageNameList = append(imageNameList, uid+"/"+files[i].Filename) imageDataList = append(imageDataList, buf.Bytes()) } else { options := s3.Options{} err = bucket.Put(path, buf.Bytes(), "binary/octet-stream", s3.Private, options) } if err != nil { c.Err = model.NewAppError("uploadFile", "Unable to upload file. ", err.Error()) return } fileUrl := c.TeamUrl + "/api/v1/files/get/" + channelId + "/" + c.Session.UserId + "/" + uid + "/" + url.QueryEscape(files[i].Filename) resStruct.Filenames = append(resStruct.Filenames, fileUrl) } fireAndForgetHandleImages(imageNameList, imageDataList, c.Session.TeamId, channelId, c.Session.UserId) w.Write([]byte(resStruct.ToJson())) }
func TestUserUploadProfileImage(t *testing.T) { th := Setup() Client := th.CreateClient() team := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "*****@*****.**", Type: model.TEAM_OPEN} team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team) user := &model.User{Email: strings.ToLower(model.NewId()) + "*****@*****.**", Nickname: "Corey Hulen", Password: "******"} user = Client.Must(Client.CreateUser(user, "")).Data.(*model.User) LinkUserToTeam(user, team) store.Must(Srv.Store.User().VerifyEmail(user.Id)) if utils.Cfg.FileSettings.DriverName != "" { body := &bytes.Buffer{} writer := multipart.NewWriter(body) if _, upErr := Client.UploadProfileFile(body.Bytes(), writer.FormDataContentType()); upErr == nil { t.Fatal("Should have errored") } Client.Login(user.Email, "pwd") Client.SetTeamId(team.Id) if _, upErr := Client.UploadProfileFile(body.Bytes(), writer.FormDataContentType()); upErr == nil { t.Fatal("Should have errored") } part, err := writer.CreateFormFile("blargh", "test.png") if err != nil { t.Fatal(err) } path := utils.FindDir("tests") file, err := os.Open(path + "/test.png") if err != nil { t.Fatal(err) } defer file.Close() _, err = io.Copy(part, file) if err != nil { t.Fatal(err) } if err := writer.Close(); err != nil { t.Fatal(err) } if _, upErr := Client.UploadProfileFile(body.Bytes(), writer.FormDataContentType()); upErr == nil { t.Fatal("Should have errored") } file2, err := os.Open(path + "/test.png") if err != nil { t.Fatal(err) } defer file2.Close() body = &bytes.Buffer{} writer = multipart.NewWriter(body) part, err = writer.CreateFormFile("image", "test.png") if err != nil { t.Fatal(err) } if _, err := io.Copy(part, file2); err != nil { t.Fatal(err) } if err := writer.Close(); err != nil { t.Fatal(err) } if _, upErr := Client.UploadProfileFile(body.Bytes(), writer.FormDataContentType()); upErr != nil { t.Fatal(upErr) } Client.DoApiGet("/users/"+user.Id+"/image", "", "") if utils.Cfg.FileSettings.DriverName == model.IMAGE_DRIVER_S3 { var auth aws.Auth auth.AccessKey = utils.Cfg.FileSettings.AmazonS3AccessKeyId auth.SecretKey = utils.Cfg.FileSettings.AmazonS3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.FileSettings.AmazonS3Region]) bucket := s.Bucket(utils.Cfg.FileSettings.AmazonS3Bucket) if err := bucket.Del("users/" + user.Id + "/profile.png"); err != nil { t.Fatal(err) } } else { path := utils.Cfg.FileSettings.Directory + "users/" + user.Id + "/profile.png" if err := os.Remove(path); err != nil { t.Fatal("Couldn't remove file at " + path) } } } else { body := &bytes.Buffer{} writer := multipart.NewWriter(body) if _, upErr := Client.UploadProfileFile(body.Bytes(), writer.FormDataContentType()); upErr.StatusCode != http.StatusNotImplemented { t.Fatal("Should have failed with 501 - Not Implemented") } } }
func uploadProfileImage(c *Context, w http.ResponseWriter, r *http.Request) { if !utils.IsS3Configured() { c.Err = model.NewAppError("uploadProfileImage", "Unable to upload image. Amazon S3 not configured. ", "") c.Err.StatusCode = http.StatusNotImplemented return } if err := r.ParseMultipartForm(10000000); err != nil { c.Err = model.NewAppError("uploadProfileImage", "Could not parse multipart form", "") return } var auth aws.Auth auth.AccessKey = utils.Cfg.AWSSettings.S3AccessKeyId auth.SecretKey = utils.Cfg.AWSSettings.S3SecretAccessKey s := s3.New(auth, aws.Regions[utils.Cfg.AWSSettings.S3Region]) bucket := s.Bucket(utils.Cfg.AWSSettings.S3Bucket) m := r.MultipartForm imageArray, ok := m.File["image"] if !ok { c.Err = model.NewAppError("uploadProfileImage", "No file under 'image' in request", "") c.Err.StatusCode = http.StatusBadRequest return } if len(imageArray) <= 0 { c.Err = model.NewAppError("uploadProfileImage", "Empty array under 'image' in request", "") c.Err.StatusCode = http.StatusBadRequest return } imageData := imageArray[0] file, err := imageData.Open() defer file.Close() if err != nil { c.Err = model.NewAppError("uploadProfileImage", "Could not open image file", err.Error()) return } // Decode image into Image object img, _, err := image.Decode(file) if err != nil { c.Err = model.NewAppError("uploadProfileImage", "Could not decode profile image", err.Error()) return } // Scale profile image img = resize.Resize(utils.Cfg.ImageSettings.ProfileWidth, utils.Cfg.ImageSettings.ProfileHeight, img, resize.Lanczos3) buf := new(bytes.Buffer) err = png.Encode(buf, img) if err != nil { c.Err = model.NewAppError("uploadProfileImage", "Could not encode profile image", err.Error()) return } path := "teams/" + c.Session.TeamId + "/users/" + c.Session.UserId + "/profile.png" options := s3.Options{} if err := bucket.Put(path, buf.Bytes(), "image", s3.Private, options); err != nil { c.Err = model.NewAppError("uploadProfileImage", "Couldn't upload profile image", "") return } Srv.Store.User().UpdateUpdateAt(c.Session.UserId) c.LogAudit("") }