Example #1
func (deleteDs *DeleteDataSource) DeleteImages(w http.ResponseWriter, r *http.Request) error {
	queryValues := r.URL.Query()
	imageKey := queryValues.Get("key")
	var redisKey, s3Key string
	if imageKey != "" {
		/*
			Split on the S3 bucket name followed by the dynamic delimiter;
			redisKey is the path prefix up to the file name.
		*/
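		// Worked example of the splits below (values are illustrative assumptions):
		//   imageKey  = "https://s3.amazonaws.com/my-bucket/avatars/1600000000_800x600.png"
		//   s3Key     = "avatars/1600000000"  (part after the bucket, up to the "_" delimiter)
		//   redisKey  = "avatars"             (s3Key with the trailing file segment removed)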
		chunk := strings.Split(imageKey, deleteDs.helper.Config.GetAmazonS3Bucket()+"/")
		chunk = strings.Split(chunk[len(chunk)-1], "_")
		s3Key = chunk[0]
		chunk = strings.Split(s3Key, "/")
		// Everything except the file name segment forms the Redis list key
		var buffer bytes.Buffer
		for i := 0; i < len(chunk)-1; i++ {
			buffer.WriteString(chunk[i] + "/")
		}
		redisKey = strings.TrimSuffix(buffer.String(), "/")
		log.Println("redisKey : ", redisKey)
	} else {
		redisKey = queryValues.Get("pathKey")
		s3Key = redisKey
	}

	fileLocation, err := redis.Strings(deleteDs.connect.RedisConn.Do("LRANGE", redisKey, 0, -1))
	if err != nil {
		log.Println("Error retrieving records (redis) : ", err)
		return err
	}

	// For single-image deletes, make sure the requested key is actually tracked in Redis
	if len(fileLocation) == 0 || (imageKey != "" && !utils.Search(imageKey, fileLocation)) {
		utils.WrapResponse(w, nil, http.StatusNotFound)
		return nil
	}

	err = deleteDs.helper.BatchDelete(s3Key)
	if err == nil {
		utils.WrapResponse(w, nil, http.StatusOK)
		if _, delErr := deleteDs.connect.RedisConn.Do("DEL", redisKey); delErr != nil {
			log.Println("Error deleting key (redis) : ", delErr)
		}
		// Rebuild the list without the deleted key and push it back to Redis
		if imageKey != "" {
			for _, elem := range fileLocation {
				if !strings.EqualFold(elem, imageKey) {
					if _, pushErr := deleteDs.connect.RedisConn.Do("RPUSH", redisKey, elem); pushErr != nil {
						log.Println("Error writing (redis) : ", pushErr)
					}
				}
			}
		}
	}
	return err
}
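
A minimal sketch of how the handler above might be exercised. It assumes a wired-up *DeleteDataSource is passed in, lives in the same package, and needs the log, net/http and net/http/httptest imports; the /images route, bucket name and key values are hypothetical, shown only to illustrate the two request shapes the handler accepts.

func exampleDeleteImages(deleteDs *DeleteDataSource) {
	// Single-image delete: pass the full S3 URL via the "key" query parameter.
	req := httptest.NewRequest(http.MethodDelete,
		"/images?key=https://s3.amazonaws.com/my-bucket/avatars/1600000000_800x600.png", nil)
	rec := httptest.NewRecorder()
	if err := deleteDs.DeleteImages(rec, req); err != nil {
		log.Println("delete failed : ", err)
	}
	log.Println("single delete status : ", rec.Code)

	// Path delete: pass the Redis list key via the "pathKey" query parameter.
	req = httptest.NewRequest(http.MethodDelete, "/images?pathKey=avatars", nil)
	rec = httptest.NewRecorder()
	if err := deleteDs.DeleteImages(rec, req); err != nil {
		log.Println("path delete failed : ", err)
	}
	log.Println("path delete status : ", rec.Code)
}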
Example #2
func (ds UploadDataSource) UploadFile(w http.ResponseWriter, r *http.Request, p httprouter.Params) error {
	if !ds.checkSize(w, r) {
		return nil
	}

	if err := r.ParseMultipartForm(32 << 15); err != nil {
		log.Println("Error parsing multipart form : ", err)
		return err
	}
	file, handler, err := r.FormFile("imageFile")
	if err != nil {
		log.Println("Error opening multipart file : ", err)
		return err
	}
	defer file.Close()

	format := handler.Header["Content-Type"]
	if len(format) == 0 || !utils.Search(format[0], utils.ACCEPTED_FORMAT) {
		log.Println("Unknown format: ", format)
		utils.WrapResponse(w, nil, http.StatusUnsupportedMediaType)
		return nil
	}

	/* Process request information details */
	requestString := r.FormValue("requestBody")
	if requestString == "" {
		log.Println("No configuration details provided")
		utils.WrapResponse(w, utils.GetErrorContent(3), http.StatusBadRequest)
		return nil
	}

	var config models.UploadModel
	if err := json.Unmarshal([]byte(requestString), &config); err != nil {
		log.Println("Error parsing configuration details : ", err)
		utils.WrapResponse(w, nil, http.StatusBadRequest)
		return nil
	}

	// Decode the file based on the image subtype taken from its Content-Type

	fileType := strings.Split(format[0], "/")
	img, err := ds.helper.Decode(file, fileType[1])
	if err != nil {
		return err
	}
	timeStamp := strconv.FormatInt(time.Now().Unix(), 10)
	ds.helper.SetFileName(timeStamp)

	/*
	   Encode the decoded image into a buffer and upload it to S3.
	   Note: io.Copy could instead dump the request body straight to a file,
	   which also supports very large files.
	*/
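	// Alternative sketch (an assumption, not part of the original flow): instead of
	// re-encoding, the raw upload could be streamed straight into the buffer below, e.g.
	//   if _, err := io.Copy(buf, file); err != nil { return err }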
	buf := new(bytes.Buffer)
	err = ds.helper.Encode(buf, img, config.GetConversionType())
	if err != nil {
		log.Println("Error encoding original : ", err)
		return err
	}

	fileName := config.GetFilePath() + utils.S3_SEPARATOR + timeStamp + "." + config.GetConversionType()
	err = ds.helper.UploadToS3(buf, fileName, "image/"+config.GetConversionType())
	if err != nil {
		return err
	}

	/*
	   1. Send the response right after uploading the original, so the client
	      doesn't have to wait for the scaled versions.
	   2. Store the dynamic path in Redis and return the location in the response.
	*/
	s3Path := ds.helper.GetS3Path()
	dynamicFileName := s3Path + config.GetFilePath() + utils.S3_SEPARATOR + timeStamp + "_{width}x{height}." + config.GetConversionType()
	_, err = ds.conn.RedisConn.Do("RPUSH", config.GetFilePath(), dynamicFileName)
	if err != nil {
		log.Println("Error writing (redis) : ", err)
	}
	successResponse := models.UploadResponse{
		ImageURL:        s3Path + fileName,
		DynamicImageURL: dynamicFileName,
	}
	utils.WrapResponse(w, successResponse, http.StatusCreated)

	/*
	   Scale the image to the most commonly requested dimensions and upload the
	   results to S3; this runs after the response has already been written.
	*/
	ds.helper.ScaleImage(img, config)

	return nil
}
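
A minimal client-side sketch of calling the handler above, assuming it is mounted at POST /upload on localhost:8080 and that models.UploadModel unmarshals filePath and conversionType fields from the requestBody JSON; the URL, port and JSON field names are assumptions for illustration. It needs the bytes, log, mime/multipart, net/http, net/textproto and os imports.

func exampleUploadFile() error {
	data, err := os.ReadFile("photo.png")
	if err != nil {
		return err
	}

	body := new(bytes.Buffer)
	writer := multipart.NewWriter(body)

	// The image part must carry a Content-Type the handler accepts.
	header := textproto.MIMEHeader{}
	header.Set("Content-Disposition", `form-data; name="imageFile"; filename="photo.png"`)
	header.Set("Content-Type", "image/png")
	part, err := writer.CreatePart(header)
	if err != nil {
		return err
	}
	if _, err := part.Write(data); err != nil {
		return err
	}

	// Configuration read by the handler via r.FormValue("requestBody") and json.Unmarshal.
	writer.WriteField("requestBody", `{"filePath":"avatars","conversionType":"png"}`)
	writer.Close()

	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/upload", body)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", writer.FormDataContentType())

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	log.Println("upload status : ", resp.Status)
	return nil
}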