// main fetches /.DS_Store from the filesystem root via a file:// transport
// and streams its contents to stdout.
func main() {
	tr := &http.Transport{}
	// http.Dir("/") makes the filesystem root the root of the "file" protocol.
	tr.RegisterProtocol("file", http.NewFileTransport(http.Dir("/")))
	c := &http.Client{Transport: tr}
	// Fetch the .DS_Store in the root directory through the client.
	r, err := c.Get("file:///.DS_Store")
	if err != nil {
		// BUG FIX: the error was previously discarded with `r, _ :=`; a
		// failed Get leaves r nil and would panic on r.Body below.
		io.WriteString(os.Stderr, "GET file:///.DS_Store: "+err.Error()+"\n")
		os.Exit(1)
	}
	defer r.Body.Close() // response bodies must always be closed
	// Copy(dst, src): write the fetched body to standard output.
	io.Copy(os.Stdout, r.Body)
}
// main downloads the CLDR zip file (or reads it from the working directory
// when -local is set), decodes the locale display-name sections, and drives
// code generation through the builder.
func main() {
	flag.Parse()
	// Read the CLDR zip file.
	if *localFiles {
		pwd, _ := os.Getwd()
		// Rewrite the URL so the file transport below can serve it locally.
		*url = "file://" + path.Join(pwd, path.Base(*url))
	}
	t := &http.Transport{}
	// Register file:// support rooted at the filesystem root so the local
	// path built above resolves.
	t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/")))
	c := &http.Client{Transport: t}
	resp, err := c.Get(*url)
	if err != nil {
		log.Fatalf("HTTP GET: %v", err)
	}
	if resp.StatusCode != 200 {
		log.Fatalf(`bad GET status for "%q": %q`, *url, resp.Status)
	}
	r := resp.Body
	defer r.Close()
	// Restrict decoding to the directories and section we actually need.
	d := &cldr.Decoder{}
	d.SetDirFilter("main", "supplemental")
	d.SetSectionFilter("localeDisplayNames")
	data, err := d.DecodeZip(r)
	if err != nil {
		log.Fatalf("DecodeZip: %v", err)
	}
	b := builder{
		data:  data,
		group: make(map[string]*group),
	}
	b.generate()
}
// main fetches ./main.go through a file:// transport rooted at the current
// directory and copies it to stdout.
func main() {
	tr := &http.Transport{}
	// Serve file:// URLs relative to the current working directory.
	tr.RegisterProtocol("file", http.NewFileTransport(http.Dir(".")))
	c := &http.Client{Transport: tr}
	r, err := c.Get("file:///main.go")
	if err != nil {
		// BUG FIX: the error was previously discarded with `r, _ :=`; a
		// failed Get leaves r nil and would panic on r.Body below.
		io.WriteString(os.Stderr, "GET file:///main.go: "+err.Error()+"\n")
		os.Exit(1)
	}
	defer r.Body.Close() // response bodies must always be closed
	io.Copy(os.Stdout, r.Body)
}
func New(docRoot string) (self *HTTPClient) { self = &HTTPClient{ Transport: &http.Transport{}, DocRoot: docRoot, schemes: []string{"file"}, } self.RegisterProtocol("file", http.NewFileTransport(http.Dir(self.DocRoot))) return }
// New constructs a DocServer that serves file:// URLs.
//
// NOTE(review): the docRoot argument is stored and then immediately
// overwritten with the hard-coded "/tmp/" below, so the parameter is
// effectively ignored — confirm whether this override is intentional
// (it looks like leftover debugging).
func New(docRoot string) (self *DocServer) {
	self = &DocServer{
		Transport: &http.Transport{},
		DocRoot:   docRoot,
		schemes:   []string{"file"},
	}
	self.DocRoot = "/tmp/" // clobbers the docRoot argument assigned above
	// Serve the "file" scheme from the (overridden) document root.
	self.RegisterProtocol("file", http.NewFileTransport(http.Dir(self.DocRoot)))
	return
}
func poolInit(limit int) { if *localImageDirectory != "" { transport.RegisterProtocol("file", http.NewFileTransport(http.Dir(*localImageDirectory))) } pool = make(chan bool, limit) for i := 0; i < limit; i++ { pool <- true } }
func init() { // Bug: What if there is a newline in the "wrong" place? urlFindRe = regexp.MustCompile(`href="((https?|file)://[^">#\?]+)`) // Note the back-quotes fileRe = regexp.MustCompile(`(file)://`) urlRejectRe = regexp.MustCompile(`\.(css|ico|js|py|pdf|png|mp3|mp4|jpg|jpeg|swf|exe|dll|so|lib)\/?$`) t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) fileClient = &http.Client{Transport: t} }
func main() { proxy := httputil.ReverseProxy{ Transport: http.NewFileTransport(http.Dir(CACHE_PATH)), Director: director, } http.HandleFunc("/", proxy.ServeHTTP) log.Println("Running...") panic(http.ListenAndServe(":8080", nil)) }
// newStaticPageHandler returns a staticPageHandles with the contents of pagePath loaded and ready to serve func newStaticPageHandler(errorPage string, defaultErrorPage string) *staticPageHandler { t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) c := &http.Client{Transport: t} s := &staticPageHandler{c: c} if err := s.loadUrl(errorPage); err != nil { s.loadUrl(defaultErrorPage) } return s }
func handleInit() { pool := thumbnail.NewPool(*maxImageThreads, 1) transport := &http.Transport{Proxy: http.ProxyFromEnvironment} if *localImageDirectory != "" { transport.RegisterProtocol("file", http.NewFileTransport(http.Dir(*localImageDirectory))) } client := &http.Client{Transport: http.RoundTripper(transport), Timeout: *fetchTimeout} http.Handle("/", thumbnail.NewProxy(director, pool, *maxPrefetch+*maxImageThreads, client)) }
func fetch(_url *url.URL) ([]byte, error) { t := &http.Transport{} if isWindows() { t.RegisterProtocol("file", http.NewFileTransport(http.Dir(""))) } else { t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) } c := &http.Client{Transport: t} res, err := c.Get(_url.String()) if err != nil { return nil, err } defer res.Body.Close() contents, err := ioutil.ReadAll(res.Body) if err != nil { return nil, err } return contents, nil }
// openReader opens the url or file given by url and returns it as an io.ReadCloser // or nil on error. func openReader(url string) io.ReadCloser { if *localFiles { pwd, _ := os.Getwd() url = "file://" + path.Join(pwd, path.Base(url)) } t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) c := &http.Client{Transport: t} resp, err := c.Get(url) Error(err) if resp.StatusCode != 200 { Error(fmt.Errorf(`bad GET status for "%s": %s`, url, resp.Status)) } return resp.Body }
// openReader opens the URL or file given by url and returns it as an io.ReadCloser // or nil on error. func openReader(url *string) (io.ReadCloser, error) { if *localFiles { pwd, _ := os.Getwd() *url = "file://" + path.Join(pwd, path.Base(*url)) } t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) c := &http.Client{Transport: t} resp, err := c.Get(*url) if err != nil { return nil, err } if resp.StatusCode != 200 { return nil, fmt.Errorf(`bad GET status for "%s": %s`, *url, resp.Status) } return resp.Body, nil }
func NewFixturesTransport(dir string) (*FixturesTransport, error) { if dir == "" { dir = filepath.Join( build.Default.GOPATH, "src", "github.com/omise/omise-go", "testdata/fixtures", ) } if fi, e := os.Lstat(dir); e != nil { return nil, e } else if !fi.IsDir() { return nil, errors.New(dir + " is not a directory.") } backing := http.NewFileTransport(http.Dir(dir)) return &FixturesTransport{backing, dir}, nil }
// stubHTTPGet intercepts a call to http.Get and rewrites it to use // "file://" to get the profile directly from a file. func stubHTTPGet(source string, _ time.Duration) (*http.Response, error) { url, err := url.Parse(source) if err != nil { return nil, err } values := url.Query() file := values.Get("file") if file == "" { return nil, fmt.Errorf("want .../file?profile, got %s", source) } t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("testdata/"))) c := &http.Client{Transport: t} return c.Get("file:///" + file) }
// fetch gets the contents at a given URL. The URL can point to a local file. // Errors terminate. func fetch(url string) []byte { // Make a client that can load files if given a file:// URL. t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) c := &http.Client{Transport: t} // Download the config file from a well-known location. resp, err := c.Get(url) if err != nil { log.Fatalln(err) } defer resp.Body.Close() body, err := ioutil.ReadAll(resp.Body) if err != nil { log.Fatalln(err) } return body }
func (s *ValidateSuite) makeLocalMetadata(c *gc.C, id, region, series, endpoint string) error { im := ImageMetadata{ Id: id, Arch: "amd64", } cloudSpec := CloudSpec{ Region: region, Endpoint: endpoint, } _, err := MakeBoilerplate("", series, &im, &cloudSpec, false) if err != nil { return err } t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) s.oldClient = SetHttpClient(&http.Client{Transport: t}) return nil }
func TestFileTransport(t *testing.T) { check := checker(t) dname, err := ioutil.TempDir("", "") check("TempDir", err) fname := filepath.Join(dname, "foo.txt") err = ioutil.WriteFile(fname, []byte("Bar"), 0644) check("WriteFile", err) defer os.Remove(dname) defer os.Remove(fname) tr := &http.Transport{} tr.RegisterProtocol("file", http.NewFileTransport(http.Dir(dname))) c := &http.Client{Transport: tr} fooURLs := []string{"file:///foo.txt", "file://../foo.txt"} for _, urlstr := range fooURLs { res, err := c.Get(urlstr) check("Get "+urlstr, err) if res.StatusCode != 200 { t.Errorf("for %s, StatusCode = %d, want 200", urlstr, res.StatusCode) } if res.ContentLength != -1 { t.Errorf("for %s, ContentLength = %d, want -1", urlstr, res.ContentLength) } if res.Body == nil { t.Fatalf("for %s, nil Body", urlstr) } slurp, err := ioutil.ReadAll(res.Body) check("ReadAll "+urlstr, err) if string(slurp) != "Bar" { t.Errorf("for %s, got content %q, want %q", urlstr, string(slurp), "Bar") } } const badURL = "file://../no-exist.txt" res, err := c.Get(badURL) check("Get "+badURL, err) if res.StatusCode != 404 { t.Errorf("for %s, StatusCode = %d, want 404", badURL, res.StatusCode) } }
// fileServerOnDocRoot returns a RoundTripper that serves files from the
// DocServer's document root.
func fileServerOnDocRoot(ds *DocServer) http.RoundTripper {
	root := http.Dir(ds.DocRoot)
	return http.NewFileTransport(root)
}
// init makes the images data accessible through the "file" protocol by
// registering a file transport on the default HTTP transport.
func init() {
	fileTransport := http.NewFileTransport(http.Dir("testdata"))
	http.DefaultTransport.(*http.Transport).RegisterProtocol("file", fileTransport)
}
// registerFileProtocol registers support for file:// URLs on the given transport. func registerFileProtocol(transport *http.Transport) { transport.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) }
// get a an HTTP client to retrieve URL's. This client allows the use of `file` // scheme since we may need to fetch files from users filesystem func getHTTPClient() Client { transport := &http.Transport{} transport.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) return &http.Client{Transport: transport} }
func NewWOFClone(source string, dest string, procs int, logger *log.WOFLogger) (*WOFClone, error) { // https://golang.org/src/net/http/filetransport.go u, err := url.Parse(source) if err != nil { return nil, err } var cl *http.Client if u.Scheme == "file" { root := u.Path if !strings.HasSuffix(root, "/") { root = root + "/" } /* Pay attention to what's going here. Absent tweaking the URL to fetch in the 'Fetch' method the following will not work. In order to make this working *without* tweaking the URL you would need to specifiy the root as '/' which just seems like a bad idea. The fear of blindly opening up the root level directory on the file system in this context may seem a bit premature (not to mention silly) but measure twice and all that good stuff... See also: https://code.google.com/p/go/issues/detail?id=2113 (20160112/thisisaaronland) */ t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir(root))) cl = &http.Client{Transport: t} } else { cl = &http.Client{} } runtime.GOMAXPROCS(procs) workpool, _ := tunny.CreatePoolGeneric(procs).Open() retries := pool.NewLIFOPool() /* This gets triggered in the 'Process' function to ensure that we don't exit out of 'CloneMetaFile' before all the goroutines to write new files to disk actually finish ... you know, writing to disk (20160606/thisisaaronland) */ writesync := new(sync.WaitGroup) ch := make(chan bool) c := WOFClone{ Success: 0, Error: 0, Skipped: 0, Source: source, Dest: dest, Logger: logger, MaxRetries: 25.0, // maybe allow this to be user-defined ? client: cl, workpool: workpool, writesync: writesync, retries: retries, timer: time.Now(), done: ch, } go func(c *WOFClone) { for { select { case <-c.done: break case <-time.After(1 * time.Second): c.Status() } } }(&c) return &c, nil }
// init registers a file:// transport on the default HTTP transport so that
// files under testdata can be fetched as URLs.
func init() {
	testdataTransport := http.NewFileTransport(http.Dir("testdata"))
	http.DefaultTransport.(*http.Transport).RegisterProtocol("file", testdataTransport)
}
// Load the data from NormalizationTest.txt.
//
// Fetches the test file over HTTP (or from the working directory when
// -local is set) and parses it line by line into the package-level `part`
// slice: "@Part" headers start a new Part, and data lines become Tests
// whose columns are decoded from space-separated hexadecimal code points.
func loadTestData() {
	if *localFiles {
		pwd, _ := os.Getwd()
		*url = "file://" + path.Join(pwd, file)
	}
	t := &http.Transport{}
	// file:// support lets the same client serve the local-files case above.
	t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/")))
	c := &http.Client{Transport: t}
	resp, err := c.Get(*url)
	if err != nil {
		logger.Fatal(err)
	}
	if resp.StatusCode != 200 {
		logger.Fatal("bad GET status for "+file, resp.Status)
	}
	f := resp.Body
	defer f.Close()
	input := bufio.NewReader(f)
	for {
		line, err := input.ReadString('\n')
		if err != nil {
			if err == io.EOF {
				break
			}
			logger.Fatal(err)
		}
		// Skip empty lines and comments.
		if len(line) == 0 || line[0] == '#' {
			continue
		}
		// A part header introduces a new section of the test file.
		m := partRe.FindStringSubmatch(line)
		if m != nil {
			if len(m) < 3 {
				logger.Fatal("Failed to parse Part: ", line)
			}
			i, err := strconv.Atoi(m[1])
			if err != nil {
				logger.Fatal(err)
			}
			name := m[2]
			// name[:len(name)-1] strips the trailing character captured by the regexp.
			part = append(part, Part{name: name[:len(name)-1], number: i})
			continue
		}
		// Otherwise the line must be a test case with at least six columns.
		m = testRe.FindStringSubmatch(line)
		if m == nil || len(m) < 7 {
			logger.Fatalf(`Failed to parse: "%s" result: %#v`, line, m)
		}
		test := Test{name: m[6], partnr: len(part) - 1, number: counter}
		counter++
		// Each column is a space-separated list of hex code points; decode
		// them to UTF-8 and accumulate per column.
		for j := 1; j < len(m)-1; j++ {
			for _, split := range strings.Split(m[j], " ") {
				r, err := strconv.ParseUint(split, 16, 64)
				if err != nil {
					logger.Fatal(err)
				}
				if test.r == 0 {
					// save for CharacterByCharacterTests
					test.r = rune(r)
				}
				var buf [utf8.UTFMax]byte
				sz := utf8.EncodeRune(buf[:], rune(r))
				test.cols[j-1] += string(buf[:sz])
			}
		}
		// Attach the test to the most recently started part.
		part := &part[len(part)-1]
		part.tests = append(part.tests, test)
	}
}
func NewFixturesTransport() (*FixturesTransport, error) { backing := http.NewFileTransport(http.Dir(FixtureBasePath)) return &FixturesTransport{backing}, nil }
func loadTestData() []Test { if *localFiles { pwd, _ := os.Getwd() *url = "file://" + path.Join(pwd, path.Base(*url)) } t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) c := &http.Client{Transport: t} resp, err := c.Get(*url) Error(err) if resp.StatusCode != 200 { log.Fatalf(`bad GET status for "%s": %s`, *url, resp.Status) } f := resp.Body buffer, err := ioutil.ReadAll(f) f.Close() Error(err) archive, err := zip.NewReader(bytes.NewReader(buffer), int64(len(buffer))) Error(err) tests := []Test{} for _, f := range archive.File { // Skip the short versions, which are simply duplicates of the long versions. if strings.Contains(f.Name, "SHORT") || f.FileInfo().IsDir() { continue } ff, err := f.Open() Error(err) defer ff.Close() input := bufio.NewReader(ff) test := Test{name: path.Base(f.Name)} for { line, err := input.ReadString('\n') if err != nil { if err == io.EOF { break } log.Fatal(err) } if len(line) <= 1 || line[0] == '#' { if m := versionRe.FindStringSubmatch(line); m != nil { if m[1] != unicode.Version { log.Printf("warning:%s: version is %s; want %s", f.Name, m[1], unicode.Version) } } continue } m := testRe.FindStringSubmatch(line) if m == nil || len(m) < 3 { log.Fatalf(`Failed to parse: "%s" result: %#v`, line, m) } str := "" for _, split := range strings.Split(m[1], " ") { r, err := strconv.ParseUint(split, 16, 64) Error(err) str += string(rune(r)) } test.str = append(test.str, str) test.comment = append(test.comment, m[2]) } tests = append(tests, test) } return tests }
func (c *ValidateImageMetadataCommand) Run(context *cmd.Context) error { var params *imagemetadata.MetadataLookupParams if c.providerType == "" { environ, err := environs.NewFromName(c.EnvName) if err != nil { return err } mdLookup, ok := environ.(imagemetadata.ImageMetadataValidator) if !ok { return fmt.Errorf("%s provider does not support image metadata validation", environ.Config().Type()) } params, err = mdLookup.MetadataLookupParams(c.region) if err != nil { return err } } else { prov, err := environs.Provider(c.providerType) if err != nil { return err } mdLookup, ok := prov.(imagemetadata.ImageMetadataValidator) if !ok { return fmt.Errorf("%s provider does not support image metadata validation", c.providerType) } params, err = mdLookup.MetadataLookupParams(c.region) if err != nil { return err } } if c.series != "" { params.Series = c.series } if c.region != "" { params.Region = c.region } if c.endpoint != "" { params.Endpoint = c.endpoint } // If the metadata files are to be loaded from a directory, we need to register // a file http transport. if c.metadataDir != "" { if _, err := os.Stat(c.metadataDir); err != nil { return err } params.BaseURLs = []string{"file://" + c.metadataDir} t := &http.Transport{} t.RegisterProtocol("file", http.NewFileTransport(http.Dir("/"))) c := &http.Client{Transport: t} imagemetadata.SetHttpClient(c) } image_ids, err := imagemetadata.ValidateImageMetadata(params) if err != nil { return err } if len(image_ids) > 0 { fmt.Fprintf(context.Stdout, "matching image ids for region %q:\n%s\n", params.Region, strings.Join(image_ids, "\n")) } else { return fmt.Errorf("no matching image ids for region %s using URLs:\n%s", params.Region, strings.Join(params.BaseURLs, "\n")) } return nil }