Example #1
func (p *Propolis) SignRequest(req *http.Request) {
	// gather the string to be signed

	// method
	msg := req.Method + "\n"

	// md5sum
	msg += req.Header.Get("Content-MD5") + "\n"

	// content-type
	msg += req.Header.Get("Content-Type") + "\n"

	// date
	msg += req.Header.Get("Date") + "\n"

	// add headers
	for _, key := range AWS_HEADERS {
		if value := req.Header.Get(key); value != "" {
			msg += strings.ToLower(key) + ":" + value + "\n"
		}
	}

	// resource: the path components should be URL-encoded, but not the slashes
	u := new(url.URL)
	u.Path = "/" + p.Bucket + req.URL.Path
	msg += u.String()

	// create the signature
	mac := hmac.New(sha1.New, []byte(p.Secret))
	mac.Write([]byte(msg))

	// get a base64 encoding of the signature
	var encoded bytes.Buffer
	encoder := base64.NewEncoder(base64.StdEncoding, &encoded)
	encoder.Write(mac.Sum(nil))
	encoder.Close()
	signature := encoded.String()

	req.Header.Set("Authorization", "AWS "+p.Key+":"+signature)
}
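For comparison, here is a minimal standalone sketch of the same signing pattern (HMAC-SHA1 over a string-to-sign, base64-encoded into the Authorization header). The credentials, bucket, and string-to-sign below are placeholders, not values from the Propolis project.

package main

import (
	"crypto/hmac"
	"crypto/sha1"
	"encoding/base64"
	"fmt"
	"net/http"
)

func main() {
	// Placeholder credentials and string-to-sign, for illustration only.
	secret := "example-secret-key"
	accessKey := "example-access-key"
	stringToSign := "GET\n\n\nWed, 01 Mar 2023 12:00:00 GMT\n/example-bucket/some/key"

	// Sign with HMAC-SHA1 and base64-encode the digest.
	mac := hmac.New(sha1.New, []byte(secret))
	mac.Write([]byte(stringToSign))
	signature := base64.StdEncoding.EncodeToString(mac.Sum(nil))

	// Attach the signature as an S3-style Authorization header.
	req, err := http.NewRequest("GET", "https://example.com/example-bucket/some/key", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "AWS "+accessKey+":"+signature)
	fmt.Println(req.Header.Get("Authorization"))
}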
Example #2
File: sqs.go Project: supr/sqs
func (s *SQS) query(queueUrl string, params map[string]string, resp interface{}) error {
	params["Timestamp"] = time.Now().UTC().Format(time.RFC3339)
	var url_ *url.URL
	var err error
	var path string
	if queueUrl != "" {
		url_, err = url.Parse(queueUrl)
		path = queueUrl[len(s.Region.SQSEndpoint):]
	} else {
		url_, err = url.Parse(s.Region.SQSEndpoint)
		path = "/"
	}
	if err != nil {
		return err
	}

	//url_, err := url.Parse(s.Region.SQSEndpoint)
	//if err != nil {
	//	return err
	//}

	sign(s.Auth, "GET", path, params, url_.Host)
	url_.RawQuery = multimap(params).Encode()
	r, err := http.Get(url_.String())
	if err != nil {
		return err
	}
	defer r.Body.Close()

	//dump, _ := http.DumpResponse(r, true)
	//println("DUMP:\n", string(dump))
	//return nil

	if r.StatusCode != 200 {
		return buildError(r)
	}
	err = xml.NewDecoder(r.Body).Decode(resp)
	return err
}
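A standalone sketch of the same request/decode pattern: encode the query parameters, issue a GET, check the status, and decode the XML response into a caller-supplied struct. The endpoint, action, and result type are illustrative placeholders, and the request is unsigned, so a real SQS endpoint would reject it.

package main

import (
	"encoding/xml"
	"fmt"
	"net/http"
	"net/url"
)

// listQueuesResult is an illustrative shape for an SQS ListQueues response.
type listQueuesResult struct {
	QueueUrls []string `xml:"ListQueuesResult>QueueUrl"`
}

func main() {
	endpoint, err := url.Parse("https://sqs.us-east-1.amazonaws.com/")
	if err != nil {
		fmt.Println("parse endpoint:", err)
		return
	}

	// Build the query string from a parameter map.
	params := url.Values{}
	params.Set("Action", "ListQueues")
	params.Set("Version", "2012-11-05")
	endpoint.RawQuery = params.Encode()

	r, err := http.Get(endpoint.String())
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer r.Body.Close()

	if r.StatusCode != http.StatusOK {
		fmt.Println("unexpected status:", r.Status)
		return
	}

	// Stream-decode the XML body directly from the response.
	var result listQueuesResult
	if err := xml.NewDecoder(r.Body).Decode(&result); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println(result.QueueUrls)
}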
Example #3
func cacheLogOutput(logUrl *url.URL) (cacheFilePath string, ok bool) {
	cacheFilePath = path.Join(cacheRoot, determineFilename(logUrl))
	// See if it already exists.
	_, err := os.Stat(cacheFilePath)
	if err == nil {
		return cacheFilePath, true
	}
	// Create a cached file.
	tempFile, err := os.Create(cacheFilePath + "-tmp")
	if err != nil {
		log.Printf("Failed to generate temp filename: %s", err)
		return
	}
	defer func() {
		tempFile.Close()
		os.Remove(tempFile.Name())
	}()
	// Do a URL request, and pipe the data into the temporary file.
	r, err := http.Get(logUrl.String())
	if err != nil {
		log.Printf("Failed to http.Get: %s", err)
		return
	}
	defer r.Body.Close()
	_, err = io.Copy(tempFile, r.Body)
	if err != nil {
		log.Printf("Failed to io.Copy HTTP: %s", err)
		return
	}
	// Move the file to its final location.
	tempFile.Close()
	err = os.Rename(tempFile.Name(), cacheFilePath)
	if err != nil {
		log.Printf("Failed to rename temp file: %s", err)
		return
	}
	// The file is now cached; return its path.
	return cacheFilePath, true
}
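The core of the function above is the download-to-temp-file-then-rename pattern, which keeps a partially written download from ever appearing as a complete cache entry. A standalone sketch of just that pattern, with a placeholder URL and destination path:

package main

import (
	"io"
	"log"
	"net/http"
	"os"
	"path/filepath"
)

// cacheToFile downloads rawURL into cacheFilePath via a temporary file,
// renaming it into place only after the copy has finished.
func cacheToFile(rawURL, cacheFilePath string) error {
	tempFile, err := os.Create(cacheFilePath + "-tmp")
	if err != nil {
		return err
	}
	defer func() {
		tempFile.Close()
		os.Remove(tempFile.Name()) // no-op once the rename has succeeded
	}()

	r, err := http.Get(rawURL)
	if err != nil {
		return err
	}
	defer r.Body.Close()

	if _, err := io.Copy(tempFile, r.Body); err != nil {
		return err
	}
	tempFile.Close()
	return os.Rename(tempFile.Name(), cacheFilePath)
}

func main() {
	dest := filepath.Join(os.TempDir(), "example-cache-entry")
	if err := cacheToFile("https://example.com/some.log", dest); err != nil {
		log.Printf("caching failed: %s", err)
		return
	}
	log.Printf("cached to %s", dest)
}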
Example #4
func getContent(url *url.URL, req *http.Request) *memcache.MemMapItem {

	cacheToken := url.String()

	cached := Repos.GetByKey(cacheToken)
	if cached != nil {
		return cached
	}
	backendUrl := getNewUrl(url)

	newReq := http.Request{
		Method:     "GET",
		URL:        backendUrl,
		Proto:      "HTTP/1.1",
		ProtoMajor: 1,
		ProtoMinor: 1,
		RemoteAddr: "192.168.0.21",
	}

	newReq.Header = http.Header{}
	newReq.Header.Add("Accept", "*/*")
	newReq.Header.Add("Accept-Charset", "utf-8,ISO-8859-1;q=0.7,*;q=0.3")
	newReq.Header.Add("Accept-Encoding", "utf-8")
	newReq.Header.Add("Host", backendUrl.Host)

	//newReq = ResponseWriter{};

	response, err := Client.Do(&newReq)

	if err != nil {
		log.Fatal("error: ", err.String())
	}

	cacheItem := memcache.MemMapItem{Key: cacheToken}
	cacheItem.Raw, _ = ioutil.ReadAll(response.Body)
	cacheItem.Head = response.Header

	Repos.Add(&cacheItem)

	return &cacheItem
}
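A standalone sketch of the same fetch-and-cache flow, using http.NewRequest rather than a hand-built http.Request value, with a plain map standing in for the project's memcache repository. The backend URL and cache type are placeholders.

package main

import (
	"io"
	"log"
	"net/http"
)

// cacheEntry is a placeholder for the project's MemMapItem.
type cacheEntry struct {
	Body []byte
	Head http.Header
}

var cache = map[string]*cacheEntry{}

func getContent(rawURL string) (*cacheEntry, error) {
	// Serve from the cache when possible.
	if entry, ok := cache[rawURL]; ok {
		return entry, nil
	}

	req, err := http.NewRequest("GET", rawURL, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Accept", "*/*")
	req.Header.Set("Accept-Charset", "utf-8,ISO-8859-1;q=0.7,*;q=0.3")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// Store the body and headers, then return the new entry.
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	entry := &cacheEntry{Body: body, Head: resp.Header}
	cache[rawURL] = entry
	return entry, nil
}

func main() {
	entry, err := getContent("https://example.com/")
	if err != nil {
		log.Fatal("error: ", err)
	}
	log.Printf("cached %d bytes", len(entry.Body))
}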
Example #5
func (p *Propolis) SendRequest(method string, reduced bool, src string, target *url.URL, body io.ReadCloser, hash string, info os.FileInfo) (resp *http.Response, err error) {
	defer func() {
		// if anything goes wrong, close the body reader
		// if it ends normally, this will be closed already and set to nil
		if body != nil {
			body.Close()
		}
	}()

	var req *http.Request
	if req, err = http.NewRequest(method, target.String(), body); err != nil {
		return
	}

	// set upload file info if applicable
	if info != nil && body != nil {
		// TODO: 0-length files fail because the Content-Length field is missing
		// a fix is in the works in the Go library
		req.ContentLength = info.Size()
	}

	if info != nil {
		p.SetRequestMetaData(req, info)
	}

	// reduced redundancy?
	if reduced {
		req.Header.Set("X-Amz-Storage-Class", "REDUCED_REDUNDANCY")
	}

	// are we uploading a file with a content hash?
	if hash != "" {
		req.Header.Set("Content-MD5", hash)
	}

	// is this a copy/metadata update?
	if src != "" {
		// note: src should already be a full bucket + path name
		u := new(url.URL)
		u.Path = src
		req.Header.Set("X-Amz-Copy-Source", u.String())
		req.Header.Set("X-Amz-Metadata-Directive", "REPLACE")
	}

	// sign and execute the request
	// note: 2nd argument is temporary hack to set Content-Length: 0 when needed
	if resp, err = p.SignAndExecute(req, (method == "PUT" && body == nil) || (info != nil && info.Size() == 0)); err != nil {
		return
	}

	// body was closed when the request was written out,
	// so nullify the deferred close
	body = nil

	if resp.StatusCode < 200 || resp.StatusCode > 299 {
		err = errors.New(resp.Status)
		return
	}

	return
}
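Two details of the function above are worth isolating: building a url.URL with only its Path set so that a copy-source value is percent-encoded (while its slashes are kept), and treating any status outside the 2xx range as an error. A standalone sketch with placeholder bucket and key names:

package main

import (
	"errors"
	"fmt"
	"net/http"
	"net/url"
)

// checkStatus converts any non-2xx response into an error, as SendRequest does.
func checkStatus(resp *http.Response) error {
	if resp.StatusCode < 200 || resp.StatusCode > 299 {
		return errors.New(resp.Status)
	}
	return nil
}

func main() {
	// Setting only Path percent-encodes the components but leaves slashes intact.
	src := &url.URL{Path: "/example-bucket/some key with spaces"}

	req, err := http.NewRequest("PUT", "https://example.com/example-bucket/dest", nil)
	if err != nil {
		fmt.Println("new request:", err)
		return
	}
	req.Header.Set("X-Amz-Copy-Source", src.String())
	req.Header.Set("X-Amz-Metadata-Directive", "REPLACE")
	fmt.Println(req.Header.Get("X-Amz-Copy-Source"))

	// After executing the request, checkStatus would be applied to the response:
	//   resp, err := http.DefaultClient.Do(req)
	//   if err == nil { err = checkStatus(resp) }
}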
Example #6
func determineFilename(u *url.URL) string {
	h := sha1.New()
	h.Write([]byte(u.String()))
	return hex.EncodeToString(h.Sum(nil))
}
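A minimal self-contained usage sketch; determineFilename is repeated so the snippet compiles on its own, and the URL is a placeholder.

package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"net/url"
)

// determineFilename hashes a URL's string form into a hex cache filename.
func determineFilename(u *url.URL) string {
	h := sha1.New()
	h.Write([]byte(u.String()))
	return hex.EncodeToString(h.Sum(nil))
}

func main() {
	u, err := url.Parse("https://example.com/build/123/log.txt")
	if err != nil {
		panic(err)
	}
	// Prints a 40-character hex SHA-1 digest of the URL string.
	fmt.Println(determineFilename(u))
}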