Example #1
func extractHTTPFormValues(reader *multipart.Reader) (io.Reader, map[string]string, *probe.Error) {
	/// HTML Form values
	formValues := make(map[string]string)
	filePart := new(bytes.Buffer)
	var err error
	for err == nil {
		var part *multipart.Part
		part, err = reader.NextPart()
		if part != nil {
			if part.FileName() == "" {
				buffer, err := ioutil.ReadAll(part)
				if err != nil {
					return nil, nil, probe.NewError(err)
				}
				formValues[http.CanonicalHeaderKey(part.FormName())] = string(buffer)
			} else {
				_, err := io.Copy(filePart, part)
				if err != nil {
					return nil, nil, probe.NewError(err)
				}
			}
		}
	}
	return filePart, formValues, nil
}
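A sketch of how an extractor like this is typically driven from a handler. The handler name and error responses are illustrative, not part of the example; only r.MultipartReader from net/http and the function above are assumed:

func uploadHandler(w http.ResponseWriter, r *http.Request) {
	// MultipartReader streams the body part by part instead of buffering
	// the whole form in memory the way r.ParseMultipartForm does.
	reader, err := r.MultipartReader()
	if err != nil {
		http.Error(w, "expected multipart/form-data", http.StatusBadRequest)
		return
	}
	filePart, formValues, perr := extractHTTPFormValues(reader)
	if perr != nil {
		http.Error(w, "malformed form data", http.StatusBadRequest)
		return
	}
	// Field names come back canonicalized, e.g. "Key" rather than "key".
	fileBytes, err := ioutil.ReadAll(filePart)
	if err != nil {
		http.Error(w, "read error", http.StatusInternalServerError)
		return
	}
	fmt.Fprintf(w, "received %d bytes and %d form fields", len(fileBytes), len(formValues))
}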
Example #2
func handleUpload(r *http.Request, p *multipart.Part) (fi *FileInfo) {
	fi = &FileInfo{
		Name: p.FileName(),
		Type: p.Header.Get("Content-Type"),
	}
	if !fi.ValidateType() {
		return
	}
	defer func() {
		if rec := recover(); rec != nil {
			log.Println(rec)
			fi.Error = rec.(error).Error()
		}
	}()
	lr := &io.LimitedReader{R: p, N: MAX_FILE_SIZE + 1}
	context := appengine.NewContext(r)
	w, err := blobstore.Create(context, fi.Type)
	defer func() {
		w.Close()
		fi.Size = MAX_FILE_SIZE + 1 - lr.N
		fi.Key, err = w.Key()
		check(err)
		if !fi.ValidateSize() {
			err := blobstore.Delete(context, fi.Key)
			check(err)
			return
		}
		delayedDelete(context, fi)
		fi.CreateUrls(r, context)
	}()
	check(err)
	_, err = io.Copy(w, lr)
	return
}
Example #3
// Extract form fields and file data from an HTTP POST Policy
func extractPostPolicyFormValues(reader *multipart.Reader) (filePart io.Reader, fileName string, formValues map[string]string, err error) {
	/// HTML Form values
	formValues = make(map[string]string)
	fileName = ""
	for err == nil {
		var part *multipart.Part
		part, err = reader.NextPart()
		if part != nil {
			canonicalFormName := http.CanonicalHeaderKey(part.FormName())
			if canonicalFormName != "File" {
				var buffer []byte
				limitReader := io.LimitReader(part, maxFormFieldSize+1)
				buffer, err = ioutil.ReadAll(limitReader)
				if err != nil {
					return nil, "", nil, err
				}
				if int64(len(buffer)) > maxFormFieldSize {
					return nil, "", nil, errSizeUnexpected
				}
				formValues[canonicalFormName] = string(buffer)
			} else {
				filePart = part
				fileName = part.FileName()
				// As described in S3 spec, we expect file to be the last form field
				break
			}
		}
	}
	return filePart, fileName, formValues, nil
}
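To exercise an extractor like this outside of a running server, a multipart body can be built in memory with mime/multipart's Writer and fed back into a Reader. A minimal sketch; the field names and contents are illustrative:

func buildTestForm() (*multipart.Reader, error) {
	body := new(bytes.Buffer)
	w := multipart.NewWriter(body)
	// Ordinary form fields first; per the policy above, the file part
	// is expected to be the last field in the form.
	if err := w.WriteField("key", "uploads/object-name"); err != nil {
		return nil, err
	}
	fw, err := w.CreateFormFile("file", "hello.txt")
	if err != nil {
		return nil, err
	}
	if _, err := fw.Write([]byte("hello world")); err != nil {
		return nil, err
	}
	if err := w.Close(); err != nil { // writes the closing boundary
		return nil, err
	}
	return multipart.NewReader(body, w.Boundary()), nil
}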
Example #4
func sendFileBlobs(c *blobserv.Client, part *multipart.Part) (respMeta map[string]interface{}) {
	meta := blob.NewMeta()
	defer func() {
		respMeta = map[string]interface{}{}
		respMeta["name"] = meta.Name
		respMeta["size"] = meta.Size

		if r := recover(); r != nil {
			respMeta["error"] = r.(error).Error()
		}
	}()

	obj := blob.NewObject()
	meta.RcasObjectRef = obj.Ref()
	meta.Name = part.FileName()

	data, err := ioutil.ReadAll(part)
	util.Check(err)

	meta.Size = int64(len(data))

	blobs := blob.SplitRaw(data, blob.DefaultChunkSize)
	meta.ContentRefs = blob.RefsFor(blobs)

	m, err := blob.Marshal(meta)
	util.Check(err)

	blobs = append(blobs, m, obj)
	for _, b := range blobs {
		err = c.PutBlob(b)
		util.Check(err)
	}

	return respMeta
}
Example #5
// save a single image
func handleSaveSingleImage(part *multipart.Part) (info models.ImageInfo, err error) {
	newID := bson.NewObjectId()
	date := time.Now().Format("20060102")

	err = helper.CreateDirIfNotExists(filepath.Join(helper.Config.SaveDir, date))
	if err != nil {
		return
	}
	path := filepath.Join(date, newID.Hex())
	savePath := filepath.Join(helper.Config.SaveDir, path)

	dst, err := os.Create(savePath)

	if err != nil {
		return
	}

	defer dst.Close()

	var bytes int64
	if bytes, err = io.Copy(dst, part); err != nil {
		return
	}

	width, height := helper.GetImageDimensions(savePath)

	var hash models.HashInfo

	hash, err = helper.CalculateBasicHashes(savePath)

	if err != nil {
		return
	}

	URL := helper.Config.BaseURL + "/img/" + newID.Hex()

	info = models.ImageInfo{
		ID:        newID,
		Name:      part.FileName(),
		Extension: filepath.Ext(part.FileName()),
		Path:      path,
		Width:     width,
		Height:    height,
		URL:       URL,
		Resizes:   map[string]string{},
		Hash:      hash,
		Size:      bytes,
		CreatedAt: time.Now(),
	}
	err = db.StoreImage(&info)
	if err != nil {
		return
	}
	return info, nil
}
Example #6
func readAttachmentFromMimePart(part *multipart.Part) NNTPAttachment {
	hdr := part.Header

	content_type := hdr.Get("Content-Type")
	media_type, _, err := mime.ParseMediaType(content_type)
	buff := new(bytes.Buffer)
	fname := part.FileName()
	idx := strings.LastIndex(fname, ".")
	ext := ".txt"
	if idx > 0 {
		ext = fname[idx:]
	}

	transfer_encoding := hdr.Get("Content-Transfer-Encoding")

	if transfer_encoding == "base64" {
		// read the attachment entirely
		io.Copy(buff, part)
		// clear reference
		part = nil
		// allocate a buffer for the decoded part
		att_bytes := make([]byte, base64.StdEncoding.DecodedLen(buff.Len()))
		decoded_bytes := make([]byte, len(att_bytes))
		// decode
		_, err = base64.StdEncoding.Decode(decoded_bytes, buff.Bytes())
		// reset original attachment buffer
		buff.Reset()
		// copy and wrap
		copy(att_bytes, decoded_bytes)
		buff = bytes.NewBuffer(att_bytes)
		att_bytes = nil
		// clear reference
		decoded_bytes = nil
	} else {
		_, err = io.Copy(buff, part)
		// clear reference
		part = nil
	}
	if err != nil {
		log.Println("failed to read attachment from mimepart", err)
		return nil
	}
	sha := sha512.Sum512(buff.Bytes())
	hashstr := base32.StdEncoding.EncodeToString(sha[:])
	fpath := hashstr + ext
	return nntpAttachment{
		body:     *buff,
		header:   hdr,
		mime:     media_type,
		filename: fname,
		filepath: fpath,
		ext:      ext,
		hash:     sha[:],
	}
}
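The base64 branch above buffers the whole part and decodes it in one shot. encoding/base64 also offers a streaming decoder (the approach taken in examples 8 and 28); a small helper sketch that picks the right reader for a part:

func partBody(part *multipart.Part) io.Reader {
	// base64.NewDecoder decodes on the fly, so no DecodedLen-sized
	// intermediate buffers are needed.
	if part.Header.Get("Content-Transfer-Encoding") == "base64" {
		return base64.NewDecoder(base64.StdEncoding, part)
	}
	return part
}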
Example #7
func uploadPhoto(part *multipart.Part, c *http.Cookie) *http.Cookie {
	fmt.Printf("%v", part.FileName())
	fName := getSha(part) + filepath.Ext(part.FileName())
	wd, _ := os.Getwd()
	path := filepath.Join(wd, "assets", "imgs", fName)
	dst, _ := os.Create(path)
	defer dst.Close()
	//src.Seek(0,0)
	io.Copy(dst, part)
	return addPhoto(fName, c)
}
Example #8
func unpackPart(part *multipart.Part, emlbase string, errors chan error) {

	defer part.Close()

	partFileName := part.FileName()
	if partFileName == "" {
		return
	}

	attachmentFileName := emlbase + " " + partFileName

	attachmentFile, err := os.Create(attachmentFileName)
	if err != nil {
		errors <- MessageError(
			fmt.Sprintf(
				"Problem opening the %q file: %s",
				attachmentFileName,
				err.Error()))
		return
	}
	defer attachmentFile.Close()

	enc := part.Header.Get("Content-Transfer-Encoding")

	var partReader io.Reader

	switch enc {
	case "", "7bit", "8bit":
		partReader = part

	case "base64", "BASE64", "Base64":
		partReader = base64.NewDecoder(base64.StdEncoding, part)

	default:
		errors <- MessageError(
			fmt.Sprintf(
				"Attachment %q: unknown encoging %q",
				attachmentFileName,
				enc))
		return
	}

	_, err = io.Copy(attachmentFile, partReader)
	if err != nil {
		errors <- MessageError(
			fmt.Sprintf(
				"Problem copying the %q part of the %q message: %s",
				attachmentFileName,
				emlbase,
				err.Error()))
		return
	}
}
Example #9
func checkPartFileExists(t *testing.T, part *multipart.Part) (ok, skipped bool) {
	if part.FormName() != "file" {
		return false, true
	}

	if part.FileName() != "file1.ext" {
		t.Errorf("Filename not set")
		return
	}

	return true, false
}
Example #10
func mimeTypeForPart(part *multipart.Part) string {
	if contentType := part.Header.Get("Content-Type"); contentType != "" {
		if _, _, err := mime.ParseMediaType(contentType); err == nil {
			return contentType
		}
	}
	ext := path.Ext(part.FileName())
	mimeType := mime.TypeByExtension(ext)
	if mimeType == "" {
		mimeType = "image/jpeg"
	}
	return mimeType
}
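A brief usage sketch for the helper above, collecting the detected type of every part in a form; the surrounding loop is illustrative:

func contentTypes(mr *multipart.Reader) ([]string, error) {
	var types []string
	for {
		part, err := mr.NextPart()
		if err == io.EOF {
			return types, nil // no more parts
		}
		if err != nil {
			return nil, err
		}
		types = append(types, mimeTypeForPart(part))
		part.Close()
	}
}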
Example #11
func (d *tenpuInput) SetMultipart(part *multipart.Part) (isFile bool) {
	if part.FileName() != "" {
		d.FileName = part.FileName()
		d.ContentType = part.Header["Content-Type"][0]
		isFile = true
		return
	}

	switch part.FormName() {
	case "OwnerId":
		d.OwnerId = formValue(part)
	}
	return
}
Example #12
func (this *ImageInput) SetMultipart(part *multipart.Part) (isFile bool) {
	if part.FileName() != "" {
		this.FileName = part.FileName()
		this.ContentType = part.Header["Content-Type"][0]
		isFile = true
		return
	}

	switch part.FormName() {
	case "OwnerId":
		this.OwnerId = formValue(part)
	}
	return
}
Example #13
func handleUpload(p *multipart.Part, dir string) (fi *fileInfo) {
	fi = &fileInfo{
		Name: p.FileName(),
		Type: p.Header.Get("Content-Type"),
	}

	path := filepath.Clean(dir + "/" + fi.Name)
	f, _ := os.Create(path)

	io.Copy(f, p)

	f.Close()

	return
}
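The version above ignores the errors from os.Create and io.Copy and joins the client-supplied filename straight into the path. A more defensive variant, sketched here rather than taken from the original project, reuses the same fileInfo fields:

func handleUploadChecked(p *multipart.Part, dir string) (*fileInfo, error) {
	fi := &fileInfo{
		Name: filepath.Base(p.FileName()), // strip any directory components sent by the client
		Type: p.Header.Get("Content-Type"),
	}
	f, err := os.Create(filepath.Join(dir, fi.Name))
	if err != nil {
		return nil, err
	}
	defer f.Close()
	if _, err := io.Copy(f, p); err != nil {
		return nil, err
	}
	return fi, nil
}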
Example #14
/*
func delayedDelete(c appengine.Context, fi *FileInfo) {
	if key := string(fi.Key); key != "" {
		task := &taskqueue.Task{
			Path:   "/" + escape(key) + "/-",
			Method: "DELETE",
			Delay:  time.Duration(EXPIRATION_TIME) * time.Second,
		}
		taskqueue.Add(c, task, "")
	}
}
*/
func handleUpload(r *http.Request, p *multipart.Part) (fi *FileInfo) {
	fi = &FileInfo{
		Name: p.FileName(),
		Type: p.Header.Get("Content-Type"),
	}
	if !fi.ValidateType() {
		return
	}
	defer func() {
		if rec := recover(); rec != nil {
			fmt.Println(rec)
			fi.Error = rec.(error).Error()
		}
	}()
	lr := &io.LimitedReader{R: p, N: MAX_FILE_SIZE + 1}
	err := os.MkdirAll(path.Join(UPLOAD_DIR, target_dir), 0777)
	if err != nil {
		fmt.Println(err)
	}
	f, err := os.Create(path.Join(UPLOAD_DIR, target_dir, p.FileName()))
	/*
	context := appengine.NewContext(r)
	w, err := blobstore.Create(context, fi.Type)
	*/
	defer func() {
		f.Close()
		fi.Size = MAX_FILE_SIZE + 1 - lr.N
		//fi.Key, err = w.Key()
		//check(err)
		/*
		if !fi.ValidateSize() {
			err := blobstore.Delete(context, fi.Key)
			check(err)
			return
		}
		delayedDelete(context, fi)
		fi.CreateUrls(r, context)
		*/
	}()
	check(err)
	_, err = io.Copy(f, lr)
	return
}
Example #15
func handleUpload(r *http.Request, p *multipart.Part) (fi *FileInfo) {
	fi = &FileInfo{
		Name: p.FileName(),
		Type: p.Header.Get("Content-Type"),
	}
	if !fi.ValidateType() {
		return
	}
	defer func() {
		if rec := recover(); rec != nil {
			log.Println(rec)
			fi.Error = rec.(error).Error()
		}
	}()
	var b bytes.Buffer
	lr := &io.LimitedReader{R: p, N: MAX_FILE_SIZE + 1}
	context := appengine.NewContext(r)
	w, err := blobstore.Create(context, fi.Type)
	defer func() {
		w.Close()
		fi.Size = MAX_FILE_SIZE + 1 - lr.N
		fi.Key, err = w.Key()
		check(err)
		if !fi.ValidateSize() {
			err := blobstore.Delete(context, fi.Key)
			check(err)
			return
		}
		delayedDelete(context, fi)
		if b.Len() > 0 {
			fi.CreateThumbnail(&b, context)
		}
		fi.CreateUrls(r, context)
	}()
	check(err)
	var wr io.Writer = w
	if imageTypes.MatchString(fi.Type) {
		wr = io.MultiWriter(&b, w)
	}
	_, err = io.Copy(wr, lr)
	return
}
Example #16
func handleUpload(r *http.Request, p *multipart.Part, root string) {
	defer func() {
		if rec := recover(); rec != nil {
			logger.Println(rec)
		}
	}()
	lr := &io.LimitedReader{R: p, N: MaxFileSize + 1}
	filename := filepath.Join(root, conf.Server.StaticDir, p.FileName())
	fo, err := os.Create(filename)
	if err != nil {
		logger.Printf("err writing %q!, err = %s\n", filename, err.Error())
	}
	defer fo.Close()
	w := bufio.NewWriter(fo)
	_, err = io.Copy(w, lr)
	if err != nil {
		logger.Printf("err writing %q!, err = %s\n", filename, err.Error())
	}
	if err = w.Flush(); err != nil {
		logger.Printf("err flushing writer for %q!, err = %s\n", filename, err.Error())
	}
	return
}
Example #17
// parsePart parses a multipart mime part for an input or carrier form
// entry.  File uploads will be buffered entirely into memory.
func parsePart(part *multipart.Part) (u *url.URL, rc io.ReadCloser, err error) {
	filename := part.FileName()
	if filename != "" {
		// We assume entire file.
		// Parts are being read from multipart mime, so we
		// buffer the entire thing into memory. :[

		buf := new(bytes.Buffer)
		_, err := buf.ReadFrom(part)
		if err != nil {
			return nil, nil, err
		}
		return nil, bytesBufferCloser{buf}, nil
	}
	// We assume URL.
	inputBytes, err := ioutil.ReadAll(part)
	if err != nil {
		return nil, nil, err
	}
	rawurl := string(inputBytes)
	if rawurl == "" {
		// Form may have just been
		// submitted with no URL, but
		// with file input in another
		// field of the same name.
		// We'll check for missing
		// values after we go through
		// all the parts.
		return nil, nil, nil
	}
	u, err = parseURL(rawurl)
	if err != nil {
		return nil, nil, err
	}
	return u, nil, nil
}
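parsePart returns the buffered upload wrapped in bytesBufferCloser, which is not shown in this snippet. A plausible definition, assumed here and possibly different in the original, is a *bytes.Buffer with a no-op Close so it satisfies io.ReadCloser:

// bytesBufferCloser adapts a *bytes.Buffer to io.ReadCloser.
type bytesBufferCloser struct {
	*bytes.Buffer
}

// Close is a no-op; the buffer lives in memory and needs no cleanup.
func (bytesBufferCloser) Close() error { return nil }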
Example #18
func handleUpload(r *http.Request, p *multipart.Part) (fi *FileInfo) {
	fi = &FileInfo{
		Name: p.FileName(),
		Type: p.Header.Get("Content-Type"),
	}
	if !fi.ValidateType() {
		return
	}
	defer func() {
		if rec := recover(); rec != nil {
			log.Println(rec)
			fi.Error = rec.(error).Error()
		}
	}()
	var buffer bytes.Buffer
	hash := crc32.NewIEEE()
	mw := io.MultiWriter(&buffer, hash)
	lr := &io.LimitedReader{R: p, N: MAX_FILE_SIZE + 1}
	_, err := io.Copy(mw, lr)
	check(err)
	fi.Size = MAX_FILE_SIZE + 1 - lr.N
	if !fi.ValidateSize() {
		return
	}
	fi.SetKey(hash.Sum32())
	item := &memcache.Item{
		Key:   fi.Key,
		Value: buffer.Bytes(),
	}
	context := appengine.NewContext(r)
	err = memcache.Set(context, item)
	check(err)
	fi.createThumb(&buffer, context)
	fi.CreateUrls(r, context)
	return
}
Example #19
func write_file(part *multipart.Part) error {
	dir_name := *dist_dir +
		"/" + time.Now().Format("2006-01-02")
	file_name := dir_name + "/" + part.FileName()
	if err := os.Mkdir(dir_name, 0755); err != nil {
		// a pre-existing directory is fine; only bail out when the parent path does not exist
		if os.IsNotExist(err) {
			return err
		}
	}
	if fd, err := os.Open(file_name); err == nil {
		fd.Close()
		return nil
	}
	var err error
	var newfile *os.File
	if newfile, err = os.Create(file_name); err != nil {
		return err
	}
	log.Println("create", file_name)
	defer newfile.Close()
	buf := make([]byte, 1024*1024)
	for {
		n, err := part.Read(buf)
		newfile.Write(buf[:n])
		if err == io.EOF {
			err = nil
			break
		}
		if err != nil {
			os.Remove(file_name)
			log.Print("remove", file_name)
			break
		}
	}
	return err
}
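The manual read/write loop above can also be written with io.Copy, which handles the io.EOF bookkeeping itself; a sketch of the same copy-or-clean-up step, keeping the example's naming:

func copyPart(part *multipart.Part, newfile *os.File, file_name string) error {
	if _, err := io.Copy(newfile, part); err != nil {
		// On a failed or partial copy, remove the incomplete file as the loop above does.
		os.Remove(file_name)
		log.Print("remove ", file_name)
		return err
	}
	return nil
}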
Example #20
// save a single file
func handleSaveSingleFile(part *multipart.Part) (info models.FileInfo, err error) {
	newID := bson.NewObjectId()
	date := time.Now().Format("20060102")

	err = helper.CreateDirIfNotExists(filepath.Join(helper.Config.SaveDir, date))
	if err != nil {
		return
	}
	path := filepath.Join(date, newID.Hex())
	savePath := filepath.Join(helper.Config.SaveDir, path)

	dst, err := os.Create(savePath)

	if err != nil {
		return
	}

	defer dst.Close()

	var bytes int64
	if bytes, err = io.Copy(dst, part); err != nil {
		return
	}

	var hash models.HashInfo

	hash, err = helper.CalculateBasicHashes(savePath)

	if err != nil {
		return
	}

	URL := helper.Config.BaseURL + "/file/" + newID.Hex()

	info = models.FileInfo{
		ID:          newID,
		Name:        part.FileName(),
		Extension:   filepath.Ext(part.FileName()),
		Path:        path,
		URL:         URL,
		Hash:        hash,
		Size:        bytes,
		CreatedAt:   time.Now(),
		ContentType: getSimpleContentTypeByFileName(part.FileName()),
	}
	err = db.StoreResource(&info)
	if err != nil {
		return
	}
	return info, nil
}
Example #21
// UploadImageFile uploads an image file to a storage.
func UploadImageFile(part *multipart.Part) {

	mediatype, _, _ := mime.ParseMediaType(part.Header.Get("Content-Type"))
	log.Debugf("params %s", mediatype)
	log.Debug("fileName : " + part.FileName())
	inbuf, err := ioutil.ReadAll(part)
	if err != nil {
		log.CheckErrorWithMessage(err, "Image read failed.")
	}
	// Image resize is a bottleneck. How can we improve this?
	// https://github.com/fawick/speedtest-resize says vipsthumbnail is the fastest one.
	// Currently goyangi uses vips (https://github.com/DAddYE/vips).

	// dst, _ := image.ResizeMedium(mediatype, bytes.NewReader(inBuf))
	var dst *bytes.Buffer
	buf, err := image.ResizeMediumVips(inbuf)
	if err != nil {
		log.CheckErrorWithMessage(err, "Image resizing failed.")
		dst = bytes.NewBuffer(inbuf)
	} else {
		dst = bytes.NewBuffer(buf)
	}

	// var thumbDst *bytes.Buffer
	// thumbBuf, err := image.ResizeThumbnailVips(buf)
	// if err != nil {
	// 	log.CheckErrorWithMessage(err, "Image thumbnailing failed.")
	// 	thumbDst = bytes.NewBuffer(buf)
	// } else {
	// 	thumbDst = bytes.NewBuffer(thumbBuf)
	// }

	switch config.UploadTarget {
	case "LOCAL":
		err = file.SaveLocal(part.FileName(), dst)
		// err = file.SaveLocal(part.FileName()+"Thumbnail", thumbDst)
	case "S3":
		err = aws.PutToMyPublicBucket(s3UploadPath, part.FileName(), dst, mediatype)
		// err = aws.PutToMyPublicBucket("images/", part.FileName()+"Thumbnail", thumbDst, mediatype)
	}
	if err != nil {
		log.CheckErrorWithMessage(err, "Uploading failed.")
	}
}
Example #22
func (h *FileHandler) Upload(request shared.IRequest) error {

	var functionName string
	var fileCollection string
	var collectionRef string
	var collectionId uint64

	err := request.URLMatch(&functionName, &fileCollection, &collectionRef, &collectionId)
	if err != nil {
		return err
	}

	_, r := request.GetRaw()
	if r.Method != "POST" && r.Method != "PUT" {
		request.WriteString("Must post a file (1)")
		return nil
	}

	mpr, err := r.MultipartReader()
	if err != nil {
		return err
	}

	var part *multipart.Part
	for {
		thisPart, err := mpr.NextPart()
		if err != nil {
			break
		}
		if thisPart.FormName() == "attachment" {
			part = thisPart
			break
		}
	}
	if part == nil {
		request.WriteString("Must post a file (2)")
		return nil
	}

	origName := part.FileName()

	randBytes := make([]byte, 22, 22)
	_, _ = rand.Reader.Read(randBytes)
	fileName := hex.EncodeToString(randBytes)

	bucket, err := h.getBucket()
	if err != nil {
		return err
	}

	upload, err := ioutil.ReadAll(part)
	if err != nil {
		return err
	}
	err = bucket.Put(h.Path+fileName, upload, "application/octet-stream", s3.Private)
	if err != nil {
		return err
	}
	log.Println("File Written")

	dbEntry := map[string]interface{}{
		collectionRef: collectionId,
		"file":        fileName,
		"filename":    origName,
	}

	err = h.writeDatabaseEntry(request, dbEntry, fileCollection)
	if err != nil {
		return err
	}

	request.WriteString(`
		<script type='text/javascript'>
		window.top.file_done()
		</script>
		Uploaded Successfully.
	`)
	return nil
}
Example #23
func makeUploader(ownerName string, category string, clear bool, storage Storage) http.HandlerFunc {
	if storage == nil {
		panic("storage must be provided.")
	}

	return func(w http.ResponseWriter, r *http.Request) {
		mr, err := r.MultipartReader()

		if err != nil {
			panic(err)
		}

		var ownerId string
		var part *multipart.Part
		var attachments []*Attachment

		for {
			part, err = mr.NextPart()
			if err != nil {
				break
			}

			if part.FileName() == "" {
				if part.FormName() == ownerName {
					ownerId = formValue(part)
				}
				continue
			}

			if ownerId == "" {
				writeJson(w, fmt.Sprintf("ownerId required, Please put a hidden field in form called `%s`", ownerName), nil)
				return
			}
			att := &Attachment{}
			att.Category = category
			att.OwnerId = ownerId
			err = storage.Put(part.FileName(), part.Header["Content-Type"][0], part, att)
			if err != nil {
				att.Error = err.Error()
			}
			attachments = append(attachments, att)
		}
		if len(attachments) == 0 {
			writeJson(w, "No attachments uploaded.", nil)
			return
		}

		for _, att := range attachments {
			if att.Error != "" {
				err = errors.New("Some attachment has error")
			} else {
				storage.Database().Save(CollectionName, att)
			}
		}

		if clear {
			dbc := DatabaseClient{Database: storage.Database()}
			ats := dbc.Attachments(ownerId)
			for i := len(ats) - 1; i >= 0; i -= 1 {
				found := false
				for _, newAt := range attachments {
					if ats[i].Id == newAt.Id {
						found = true
						break
					}
				}
				if found {
					continue
				}
				for _, newAt := range attachments {
					if newAt.OwnerId == ats[i].OwnerId {
						_, err = deleteAttachment(ats[i].Id, storage)
					}
				}
			}
		}

		dbc := DatabaseClient{Database: storage.Database()}
		ats := dbc.Attachments(ownerId)
		if err != nil {
			writeJson(w, err.Error(), ats)
			return
		}

		writeJson(w, "", ats)
		return
	}
}
Example #24
// Create handles multipart upload of meta data and blob
func (handler *ObjectWebHandler) Create(w http.ResponseWriter, r *http.Request) {
	var (
		err   error
		part  *multipart.Part
		value []byte
		file  *mgo.GridFile
	)

	// We respond as JSON
	w.Header().Set("Content-Type", "application/json; charset=utf-8")

	// Check if the content type is correct
	if ctype := r.Header.Get("Content-Type"); !strings.HasPrefix(ctype, "multipart/form-data") {
		respondWithError(w, http.StatusUnsupportedMediaType, "Unsupported media type", fmt.Errorf("Expecting multipart/form-data content type but received: %v", ctype))
		return
	}

	body, err := r.MultipartReader()
	if err != nil {
		respondWithError(w, http.StatusBadRequest, "Failed to parse data", err)
		return
	}

	fs := handler.Session.DB(os.Getenv(EnvGridFSDatabase)).GridFS(os.Getenv(EnvGridFSPrefix))

	obj := &ObjectMeta{}
	obj.Metadata = map[string]interface{}{}
	obj.Metadata["cid"] = r.Header.Get("X-Correlation-Id")

	for part, err = body.NextPart(); err == nil; part, err = body.NextPart() {
		if part.FormName() == "filename" && part.FileName() == "" {
			value, err = ioutil.ReadAll(part)
			if err != nil {
				break
			}
			obj.Filename = string(value)
		} else if part.FormName() == "content_type" && part.FileName() == "" {
			value, err = ioutil.ReadAll(part)
			if err != nil {
				break
			}
			obj.ContentType = string(value)
		} else if part.FormName() == "extra.bucket" && part.FileName() == "" {
			value, err = ioutil.ReadAll(part)
			if err != nil {
				break
			}
			obj.Metadata["bucket"] = string(value)
		} else if part.FormName() == "object" && part.FileName() != "" {
			file, err = fs.Create(part.FileName())
			if err != nil {
				respondWithError(w, http.StatusInternalServerError, "Failed to create GridFS file", err)
				return
			}
			_, err = io.Copy(file, part)
			if err != nil {
				file.Close()
				respondWithError(w, http.StatusInternalServerError, "Failed to save GridFS file", err)
				return
			}
		}
	}
	if err != nil && err != io.EOF {
		if file != nil {
			file.Close()
		}
		respondWithError(w, http.StatusBadRequest, "Failed to process multipart form", err)
		return
	} else if file == nil {
		respondWithError(w, http.StatusBadRequest, "Bad request", fmt.Errorf("No object has been uploaded"))
		return
	}

	// Update metadata
	file.SetName(obj.Filename)
	file.SetMeta(obj.Metadata)
	if obj.ContentType != "" {
		file.SetContentType(obj.ContentType)
	}
	err = file.Close()
	if err != nil {
		respondWithError(w, http.StatusInternalServerError, "Failed to close GridFS file", err)
		return
	}
	obj.ID = file.Id().(bson.ObjectId)

	// Read newly created meta & return it
	err = fs.Find(bson.M{"_id": obj.ID}).One(&obj)
	if err == mgo.ErrNotFound {
		respondWithError(w, http.StatusInternalServerError, "Newly created could not be found", err)
		return
	} else if err != nil {
		respondWithError(w, http.StatusInternalServerError, "Operational error", err)
		return
	}

	w.WriteHeader(http.StatusCreated)
	json.NewEncoder(w).Encode(obj)
}
Example #25
// read an article via dotreader
func (c *v1Conn) readArticle(newpost bool, hooks EventHooks) (ps PolicyStatus, err error) {
	store_r, store_w := io.Pipe()
	article_r, article_w := io.Pipe()
	article_body_r, article_body_w := io.Pipe()

	accept_chnl := make(chan PolicyStatus)
	store_info_chnl := make(chan ArticleEntry)
	store_result_chnl := make(chan error)

	hdr_chnl := make(chan message.Header)

	log.WithFields(log.Fields{
		"pkg": "nntp-conn",
	}).Debug("start reading")
	done_chnl := make(chan PolicyStatus)
	go func() {
		var err error
		dr := c.C.DotReader()
		var buff [1024]byte
		var n int64
		n, err = io.CopyBuffer(article_w, dr, buff[:])
		log.WithFields(log.Fields{
			"n": n,
		}).Debug("read from connection")
		if err != nil && err != io.EOF {
			article_w.CloseWithError(err)
		} else {
			article_w.Close()
		}
		st := <-accept_chnl
		close(accept_chnl)
		// get result from storage
		err2, ok := <-store_result_chnl
		if ok && err2 != io.EOF {
			err = err2
		}
		close(store_result_chnl)
		done_chnl <- st
	}()

	// parse message and store attachments in bg
	go func(msgbody io.ReadCloser) {
		defer msgbody.Close()
		hdr, ok := <-hdr_chnl
		if !ok {
			return
		}
		// all text in this post
		// txt := new(bytes.Buffer)
		// the article itself
		// a := new(model.Article)
		var err error
		if hdr.IsMultipart() {
			var params map[string]string
			_, params, err = hdr.GetMediaType()
			if err == nil {
				boundary, ok := params["boundary"]
				if ok {
					part_r := multipart.NewReader(msgbody, boundary)
					for err == nil {
						var part *multipart.Part
						part, err = part_r.NextPart()
						if err == io.EOF {
							// we done
							break
						} else if err == nil {
							// we gots a part

							// get header
							part_hdr := part.Header

							// check for base64 encoding
							var part_body io.Reader
							if part_hdr.Get("Content-Transfer-Encoding") == "base64" {
								part_body = base64.NewDecoder(base64.StdEncoding, part)
							} else {
								part_body = part
							}

							// get content type
							content_type := part_hdr.Get("Content-Type")
							if len(content_type) == 0 {
								// assume text/plain
								content_type = "text/plain; charset=UTF8"
							}
							var part_type string
							// extract mime type
							part_type, _, err = mime.ParseMediaType(content_type)
							if err == nil {

								if part_type == "text/plain" {
									// if we are plaintext save it to the text buffer
									_, err = io.Copy(util.Discard, part_body)
								} else {
									var fpath string
									fname := part.FileName()
									fpath, err = c.storage.StoreAttachment(part_body, fname)
									if err == nil {
										// stored attachment good
										log.WithFields(log.Fields{
											"pkg":      "nntp-conn",
											"state":    &c.state,
											"version":  "1",
											"filename": fname,
											"filepath": fpath,
										}).Debug("attachment stored")
									} else {
										// failed to save attachment
										log.WithFields(log.Fields{
											"pkg":     "nntp-conn",
											"state":   &c.state,
											"version": "1",
										}).Error("failed to save attachment ", err)
									}
								}
							} else {
								// cannot read part header
								log.WithFields(log.Fields{
									"pkg":     "nntp-conn",
									"state":   &c.state,
									"version": "1",
								}).Error("bad attachment in multipart message ", err)
							}
							err = nil
							part.Close()
						} else if err != io.EOF {
							// error reading part
							log.WithFields(log.Fields{
								"pkg":     "nntp-conn",
								"state":   &c.state,
								"version": "1",
							}).Error("error reading part ", err)
						}
					}
				}
			}
		} else if hdr.IsSigned() {
			// signed message

			// discard for now
			_, err = io.Copy(util.Discard, msgbody)
		} else {
			// plaintext message
			var n int64
			n, err = io.Copy(util.Discard, msgbody)
			log.WithFields(log.Fields{
				"bytes": n,
				"pkg":   "nntp-conn",
			}).Debug("text body copied")
		}
		if err != nil && err != io.EOF {
			log.WithFields(log.Fields{
				"pkg":   "nntp-conn",
				"state": &c.state,
			}).Error("error handing message body", err)
		}
	}(article_body_r)

	// store function
	go func(r io.ReadCloser) {
		e, ok := <-store_info_chnl
		if !ok {
			// failed to get info
			// don't read anything
			r.Close()
			store_result_chnl <- io.EOF
			return
		}
		msgid := e.MessageID()
		if msgid.Valid() {
			// valid message-id
			log.WithFields(log.Fields{
				"pkg":     "nntp-conn",
				"msgid":   msgid,
				"version": "1",
				"state":   &c.state,
			}).Debug("storing article")

			fpath, err := c.storage.StoreArticle(r, msgid.String(), e.Newsgroup().String())
			r.Close()
			if err == nil {
				log.WithFields(log.Fields{
					"pkg":     "nntp-conn",
					"msgid":   msgid,
					"version": "1",
					"state":   &c.state,
				}).Debug("stored article okay to ", fpath)
				// we got the article
				if hooks != nil {
					hooks.GotArticle(msgid, e.Newsgroup())
				}
				store_result_chnl <- io.EOF
				log.Debugf("store informed")
			} else {
				// error storing article
				log.WithFields(log.Fields{
					"pkg":     "nntp-conn",
					"msgid":   msgid,
					"state":   &c.state,
					"version": "1",
				}).Error("failed to store article ", err)
				io.Copy(util.Discard, r)
				store_result_chnl <- err
			}
		} else {
			// invalid message-id
			// discard
			log.WithFields(log.Fields{
				"pkg":     "nntp-conn",
				"msgid":   msgid,
				"state":   &c.state,
				"version": "1",
			}).Warn("store will discard message with invalid message-id")
			io.Copy(util.Discard, r)
			store_result_chnl <- nil
			r.Close()
		}
	}(store_r)

	// acceptor function
	go func(r io.ReadCloser, out_w, body_w io.WriteCloser) {
		var w io.WriteCloser
		defer r.Close()
		status := PolicyAccept
		hdr, err := c.hdrio.ReadHeader(r)
		if err == nil {
			// append path
			hdr.AppendPath(c.serverName)
			// get message-id
			var msgid MessageID
			if newpost {
				// new post
				// generate it
				msgid = GenMessageID(c.serverName)
				hdr.Set("Message-ID", msgid.String())
			} else {
				// not a new post, get from header
				msgid = MessageID(hdr.MessageID())
				if msgid.Valid() {
					// check store for existing article
					err = c.storage.HasArticle(msgid.String())
					if err == store.ErrNoSuchArticle {
						// we don't have the article
						status = PolicyAccept
						log.Infof("accept article %s", msgid)
					} else if err == nil {
						// we do have the article, reject it we don't need it again
						status = PolicyReject
					} else {
						// some other error happened
						log.WithFields(log.Fields{
							"pkg":   "nntp-conn",
							"state": c.state,
						}).Error("failed to check store for article ", err)
					}
					err = nil
				} else {
					// bad article
					status = PolicyBan
				}
			}
			// check the header if we have an acceptor and the previous checks are good
			if status.Accept() && c.acceptor != nil {
				status = c.acceptor.CheckHeader(hdr)
			}
			if status.Accept() {
				// we have accepted the article
				// store to disk
				w = out_w
			} else {
				// we have not accepted the article
				// discard
				w = util.Discard
				out_w.Close()
			}
			store_info_chnl <- ArticleEntry{msgid.String(), hdr.Newsgroup()}
			hdr_chnl <- hdr
			// close the channel for headers
			close(hdr_chnl)
			// write header out to storage
			err = c.hdrio.WriteHeader(hdr, w)
			if err == nil {
				mw := io.MultiWriter(body_w, w)
				// we wrote header
				var n int64
				if c.acceptor == nil {
					// write the rest of the body
					// we don't care about article size
					log.WithFields(log.Fields{}).Debug("copying body")
					var buff [128]byte
					n, err = io.CopyBuffer(mw, r, buff[:])
				} else {
					// we care about the article size
					max := c.acceptor.MaxArticleSize()
					// copy it out
					n, err = io.CopyN(mw, r, max)
					if err == nil {
						if n < max {
							// under size limit
							// we gud
							log.WithFields(log.Fields{
								"pkg":   "nntp-conn",
								"bytes": n,
								"state": &c.state,
							}).Debug("body fits")
						} else {
							// too big, discard the rest
							_, err = io.Copy(util.Discard, r)
							// ... and ban it
							status = PolicyBan
						}
					}
				}
				log.WithFields(log.Fields{
					"pkg":   "nntp-conn",
					"bytes": n,
					"state": &c.state,
				}).Debug("body wrote")
				// TODO: inform store to delete article and attachments
			} else {
				// error writing header
				log.WithFields(log.Fields{
					"msgid": msgid,
				}).Error("error writing header ", err)
			}
		} else {
			// error reading header
			// possibly a read error?
			status = PolicyDefer
		}
		// close info channel for store
		close(store_info_chnl)
		w.Close()
		// close body pipe
		body_w.Close()
		// inform result
		log.Debugf("status %s", status)
		accept_chnl <- status
		log.Debugf("informed")
	}(article_r, store_w, article_body_w)

	ps = <-done_chnl
	close(done_chnl)
	log.Debug("read article done")
	return
}
Example #26
func (m *MediaController) handleUpload(r *http.Request, p *multipart.Part) (*model.Media, error) {
	filenameDecoded, _ := url.QueryUnescape(p.FileName())
	return model.NewMedia(p, filenameDecoded, "./static/images", "/images")
}
Example #27
// UploadImageFile uploads an image file to a storage.
func UploadImageFile(s3UploadPath string, part *multipart.Part) error {
	mediatype, _, _ := mime.ParseMediaType(part.Header.Get("Content-Type"))
	log.Debugf("params %s", mediatype)
	log.Debug("fileName : " + part.FileName())
	inbuf, err := ioutil.ReadAll(part)
	if err != nil {
		// log.CheckErrorWithMessage(err, "Image read failed.")
		return err
	}
	// Image resize is a bottleneck. How can we improve this?
	// https://github.com/fawick/speedtest-resize says vipsthumbnail is the fastest one.
	// Currently goyangi uses vips (https://github.com/DAddYE/vips).
	// dst, _ := image.ResizeMedium(mediatype, bytes.NewReader(inBuf))
	var dst, dstLarge, dstMedium, dstThumbnail *bytes.Buffer
	dst = bytes.NewBuffer(inbuf)
	buf, err := image.ResizeLargeVips(inbuf)
	if err != nil {
		// log.CheckErrorWithMessage(err, "Image resizing failed.")
		log.Errorf("Image large resizing failed. %s", err.Error())
		dstLarge = nil
	} else {
		dstLarge = bytes.NewBuffer(buf)
		mbuf, err := image.ResizeMediumVips(buf)
		if err != nil {
			dstMedium = nil
			log.Errorf("Image medium resizing failed. %s", err.Error())
		} else {
			dstMedium = bytes.NewBuffer(mbuf)
			tbuf, err := image.ResizeThumbnailVips(mbuf)
			if err != nil {
				dstThumbnail = nil
				log.Errorf("Image small resizing failed. %s", err.Error())
			} else {
				dstThumbnail = bytes.NewBuffer(tbuf)
			}
		}
	}

	// var thumbDst *bytes.Buffer
	// thumbBuf, err := image.ResizeThumbnailVips(buf)
	// if err != nil {
	// 	log.CheckErrorWithMessage(err, "Image thumbnailing failed.")
	// 	thumbDst = bytes.NewBuffer(buf)
	// } else {
	// 	thumbDst = bytes.NewBuffer(thumbBuf)
	// }
	basename := part.FileName()
	ext := filepath.Ext(basename)
	name := strings.TrimSuffix(basename, ext)
	originName := basename
	largeName := name + "_large" + ext
	mediumName := name + "_medium" + ext
	thumbnailName := name + "_thumbnail" + ext
	switch config.UploadTarget {
	case "LOCAL":
		err = file.SaveLocal(originName, dst)
		if dstLarge != nil {
			err = file.SaveLocal(largeName, dstLarge)
		}
		if dstMedium != nil {
			err = file.SaveLocal(mediumName, dstMedium)
		}
		if dstThumbnail != nil {
			err = file.SaveLocal(thumbnailName, dstThumbnail)
		}

	case "S3":
		switch config.Environment {
		case "DEVELOPMENT":
			fallthrough
		case "TEST":
			err = aws.PutToMyPublicTestBucket(s3UploadPath, originName, dst, mediatype)
			if dstLarge != nil {
				err = aws.PutToMyPublicTestBucket(s3UploadPath, largeName, dstLarge, mediatype)
			}
			if dstMedium != nil {
				err = aws.PutToMyPublicTestBucket(s3UploadPath, mediumName, dstMedium, mediatype)
			}
			if dstThumbnail != nil {
				err = aws.PutToMyPublicTestBucket(s3UploadPath, thumbnailName, dstThumbnail, mediatype)
			}
		case "PRODUCTION":
			err = aws.PutToMyPublicBucket(s3UploadPath, originName, dst, mediatype)
			if dstLarge != nil {
				err = aws.PutToMyPublicBucket(s3UploadPath, largeName, dstLarge, mediatype)
			}
			if dstMedium != nil {
				err = aws.PutToMyPublicBucket(s3UploadPath, mediumName, dstMedium, mediatype)
			}
			if dstThumbnail != nil {
				err = aws.PutToMyPublicBucket(s3UploadPath, thumbnailName, dstThumbnail, mediatype)
			}
		}

		// err = aws.PutToMyPublicBucket("images/", part.FileName()+"Thumbnail", thumbDst, mediatype)
	}
	if err != nil {
		// log.CheckErrorWithMessage(err, "Uploading failed.")
		return err
	}
	return nil
}
Example #28
func readAttachmentFromMimePartAndStore(part *multipart.Part, store ArticleStore) NNTPAttachment {
	hdr := part.Header
	att := &nntpAttachment{}
	att.header = hdr
	content_type := hdr.Get("Content-Type")
	var err error
	att.mime, _, err = mime.ParseMediaType(content_type)
	att.filename = part.FileName()
	idx := strings.LastIndex(att.filename, ".")
	att.ext = ".txt"
	if idx > 0 {
		att.ext = att.filename[idx:]
	}
	h := sha512.New()
	transfer_encoding := hdr.Get("Content-Transfer-Encoding")
	var r io.Reader
	if transfer_encoding == "base64" {
		// decode
		r = base64.NewDecoder(base64.StdEncoding, part)
	} else {
		r = part
	}
	var fpath string
	var mw io.Writer
	if store == nil {
		mw = io.MultiWriter(att, h)
	} else {
		fname := randStr(10) + ".temp"
		fpath = filepath.Join(store.AttachmentDir(), fname)
		f, err := os.Create(fpath)
		if err != nil {
			log.Println("!!! failed to store attachment: ", err, "!!!")
			return nil
		}
		defer f.Close()
		if strings.ToLower(att.mime) == "text/plain" {
			mw = io.MultiWriter(f, h, att)
		} else {
			mw = io.MultiWriter(f, h)
		}
	}
	_, err = io.Copy(mw, r)
	if err != nil {
		log.Println("failed to read attachment from mimepart", err)
		if fpath != "" {
			DelFile(fpath)
		}
		return nil
	}
	hsh := h.Sum(nil)
	att.hash = hsh[:]
	enc := base32.StdEncoding
	hashstr := enc.EncodeToString(att.hash[:])
	att.filepath = hashstr + att.ext
	// we are good just return it
	if store == nil {
		return att
	}
	att_fpath := filepath.Join(store.AttachmentDir(), att.filepath)
	if !CheckFile(att_fpath) {
		// attachment isn't there
		// move it into it
		err = os.Rename(fpath, filepath.Join(store.AttachmentDir(), att.filepath))
	}
	if err != nil {
		// wtf?
		log.Println("!!! failed to store attachment", err, "!!!")
		DelFile(fpath)
	}
	return att
}
Example #29
// uploadFile handles the upload of a single file from a multipart form.
func (h *UploadHandler) uploadFile(w http.ResponseWriter, p *multipart.Part) (fi *FileInfo) {
	fi = &FileInfo{
		Name: p.FileName(),
		Type: p.Header.Get("Content-Type"),
	}
	//
	// Validate file type
	//
	if !h.Conf.AcceptFileTypes.MatchString(fi.Type) {
		fi.Error = "acceptFileTypes"
		return
	}
	isImage := imageRegex.MatchString(fi.Type)
	//
	// Copy into buffers for save and thumbnail generation
	//
	// Max + 1 for LimitedReader size, so we can detect below if file size is
	// greater than max.
	lr := &io.LimitedReader{R: p, N: int64(h.Conf.MaxFileSize + 1)}
	var bSave bytes.Buffer  // Buffer to be saved
	var bThumb bytes.Buffer // Buffer to be thumbnailed
	var wr io.Writer
	if isImage {
		wr = io.MultiWriter(&bSave, &bThumb)
	} else {
		wr = &bSave
	}
	_, err := io.Copy(wr, lr)
	http500(w, err)
	//
	// Validate file size
	//
	size := bSave.Len()
	if size < h.Conf.MinFileSize {
		log.Println("File failed validation: too small.", size, h.Conf.MinFileSize)
		fi.Error = "minFileSize"
		return
	} else if size > h.Conf.MaxFileSize {
		log.Println("File failed validation: too large.", size, h.Conf.MaxFileSize)
		fi.Error = "maxFileSize"
		return
	}
	//
	// Save to data store
	//
	err = (*h.Store).Create(fi, &bSave)
	http500(w, err)
	log.Println("Create", size)
	//
	// Set URLs in FileInfo
	//
	u := &url.URL{
		Path: h.Prefix + "/",
	}
	uString := u.String()
	fi.Url = uString + escape(string(fi.Key)) + "/" +
		escape(string(fi.Name))
	fi.DeleteUrl = fi.Url
	fi.DeleteType = "DELETE"
	fi.ThumbnailUrl = uString + "thumbnails/" + escape(string(fi.Key))
	//
	// Create thumbnail
	//
	if isImage && size > 0 {
		_, err = h.createThumbnail(fi, &bThumb)
		if err != nil {
			log.Println("Error creating thumbnail:", err)
		}
		// If we wanted to save thumbnails to persistent storage, this would be
		// a good spot to do it.
	}
	return
}