Example #1
// expandBundlePath calls filepath.Glob first, then recursively adds
// descendants of any directories specified.
func expandBundlePath(pathGlob string) ([]string, error) {
	paths, err := filepath.Glob(pathGlob)
	if err != nil {
		return nil, err
	}

	for _, path := range paths {
		fi, err := os.Stat(path)
		if err != nil {
			return nil, err
		}

		if fi.Mode().IsDir() {
			w := fs.Walk(path)
			for w.Step() {
				if err := w.Err(); err != nil {
					return nil, err
				}
				paths = append(paths, w.Path())
			}
		}
	}

	return paths, nil
}
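A hypothetical call site for the function above; the "bundles/*" glob is only an illustration of how the returned slice mixes the glob matches with their expanded descendants:

	// Hypothetical usage of expandBundlePath (illustrative glob).
	paths, err := expandBundlePath("bundles/*")
	if err != nil {
		log.Fatal(err)
	}
	for _, p := range paths {
		fmt.Println(p)
	}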
Example #2
func main() {

	if len(os.Args) > 1 {
		starting_path = os.Args[1]
	}

	w := new(tabwriter.Writer)
	w.Init(os.Stdout, 2, 0, 1, ' ', tabwriter.AlignRight)

	walker := fs.Walk(starting_path)
	for walker.Step() {
		if err := walker.Err(); err != nil {
			fmt.Fprintln(os.Stderr, err)
			continue
		}
		path := walker.Path()
		if REGEX_EXCLUDE_PATH.FindStringSubmatch(path) != nil {
			continue
		}

		info := walker.Stat()
		file_type := "f"
		if info.IsDir() {
			file_type = "d"
		}

		fmt.Fprintln(w, file_type+" \t"+(info.ModTime().Format(DATE_LAYOUT)+" \t"+strconv.FormatInt(info.Size(), 10)+"\t\t"+path))
	}
	w.Flush()
}
Example #3
func (w *Walker) Walk() fusefs.Tree {
	wg := sync.WaitGroup{}

	paths := make(chan string, runtime.NumCPU())
	for i := 0; i < runtime.NumCPU(); i++ {
		go worker(w, paths, &wg)
	}

	walker := fs.Walk(w.Path)
	for walker.Step() {
		if err := walker.Err(); err != nil {
			continue
		}

		if walker.Stat().IsDir() {
			continue
		}

		wg.Add(1)
		paths <- walker.Path()
	}

	close(paths)
	wg.Wait()

	return w.tree
}
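The worker function launched above is not part of this example. A minimal, hypothetical sketch compatible with that call might look like the following; whatever the real worker inserts into w.tree is an assumption and is elided:

// Hypothetical worker: drains the paths channel and signals completion for
// every path received. The tree-building work is left as a stub.
func worker(w *Walker, paths <-chan string, wg *sync.WaitGroup) {
	for path := range paths {
		// ... stat/open path and insert a node into w.tree ...
		_ = path
		wg.Done()
	}
}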
Example #4
func RewriteImports(path string, rw func(string) string, filter func(string) bool) error {
	w := fs.Walk(path)
	for w.Step() {
		rel := w.Path()[len(path):]
		if len(rel) == 0 {
			continue
		}
		rel = rel[1:]

		if strings.HasPrefix(rel, ".git") || strings.HasPrefix(rel, "vendor") {
			w.SkipDir()
			continue
		}

		if !strings.HasSuffix(w.Path(), ".go") {
			continue
		}

		if !filter(rel) {
			continue
		}

		err := rewriteImportsInFile(w.Path(), rw)
		if err != nil {
			fmt.Println("rewrite error: ", err)
		}
	}
	return nil
}
Example #5
func copySrc(dir string, g *Godeps) error {
	ok := true
	for _, dep := range g.Deps {
		w := fs.Walk(dep.dir)
		for w.Step() {
			if w.Err() != nil {
				log.Println(w.Err())
				ok = false
				continue
			}
			if c := w.Stat().Name()[0]; c == '.' || c == '_' {
				// Skip directories using a rule similar to how
				// the go tool enumerates packages.
				// See $GOROOT/src/cmd/go/main.go:/matchPackagesInFs
				w.SkipDir()
			}
			if w.Stat().IsDir() {
				continue
			}
			dst := filepath.Join(dir, w.Path()[len(dep.ws)+1:])
			if err := copyFile(dst, w.Path()); err != nil {
				log.Println(err)
				ok = false
			}
		}
	}
	if !ok {
		return errors.New("error copying source code")
	}
	return nil
}
Example #6
func TestBug3486(t *testing.T) { // http://code.google.com/p/go/issues/detail?id=3486
	root, err := filepath.EvalSymlinks(runtime.GOROOT())
	if err != nil {
		t.Fatal(err)
	}
	lib := filepath.Join(root, "lib")
	src := filepath.Join(root, "src")
	seenSrc := false
	walker := fs.Walk(root)
	for walker.Step() {
		if walker.Err() != nil {
			t.Fatal(walker.Err())
		}

		switch walker.Path() {
		case lib:
			walker.SkipDir()
		case src:
			seenSrc = true
		}
	}
	if !seenSrc {
		t.Fatalf("%q not seen", src)
	}
}
Example #7
func copySrc(dir string, deps []Dependency) error {
	ok := true
	for _, dep := range deps {
		srcdir := filepath.Join(dep.ws, "src")
		rel, err := filepath.Rel(srcdir, dep.dir)
		if err != nil { // this should never happen
			return err
		}
		dstpkgroot := filepath.Join(dir, rel)
		err = os.RemoveAll(dstpkgroot)
		if err != nil {
			log.Println(err)
			ok = false
		}
		w := fs.Walk(dep.dir)
		for w.Step() {
			err = copyPkgFile(dir, srcdir, w)
			if err != nil {
				log.Println(err)
				ok = false
			}
		}
	}
	if !ok {
		return errors.New("error copying source code")
	}
	return nil
}
Example #8
// marcher walks the tree rooted at dir and hashes each regular file,
// returning a map keyed by file path with the hash as its value.
// Directories are skipped.
func marcher(dir string, method string) map[string]string {
	fmt.Println("")
	fmt.Println("Scanner Report")
	fmt.Println("==============")
	fmt.Println("")
	res := make(map[string]string)
	walker := fs.Walk(dir)
	for walker.Step() {
		// Start walking
		if err := walker.Err(); err != nil {
			fmt.Fprintln(os.Stderr, err)
			continue
		}
		// Check if it is a file
		finfo, err := os.Stat(walker.Path())
		if err != nil {
			fmt.Println(err)
			continue
		}
		if finfo.IsDir() {
			// it's a directory, so skip it
			continue
		} else {
			// it's a file, so record its path and hash in the map
			path := walker.Path()
			hash := hasher(walker.Path(), method)
			res[path] = hash
			fmt.Println(hash, "\t", path)
		}
	}
	fmt.Println("")
	return res
}
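The hasher helper used above is not shown. A hypothetical version, assuming method selects between MD5 and SHA-256 and that errors collapse to an empty digest; it would need crypto/md5, crypto/sha256, encoding/hex, hash, io and os imported:

// Hypothetical hasher compatible with the call above: streams the file
// through the chosen digest and returns the hex-encoded sum.
func hasher(path, method string) string {
	f, err := os.Open(path)
	if err != nil {
		return ""
	}
	defer f.Close()

	var h hash.Hash
	switch method {
	case "sha256":
		h = sha256.New()
	default:
		h = md5.New()
	}
	// Copy the file into the digest rather than reading it all into memory.
	if _, err := io.Copy(h, f); err != nil {
		return ""
	}
	return hex.EncodeToString(h.Sum(nil))
}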
Example #9
func readArticles() ([]*Article, []string, error) {
	timeStart := time.Now()
	walker := fs.Walk("blog_posts")
	var res []*Article
	var dirs []string
	for walker.Step() {
		if walker.Err() != nil {
			fmt.Printf("readArticles: walker.Step() failed with %s\n", walker.Err())
			return nil, nil, walker.Err()
		}
		st := walker.Stat()
		path := walker.Path()
		if st.IsDir() {
			dirs = append(dirs, path)
			continue
		}
		a, err := readArticle(path)
		if err != nil {
			fmt.Printf("readArticle() of %s failed with %s\n", path, err)
			return nil, nil, err
		}
		if a != nil {
			res = append(res, a)
		}
	}
	fmt.Printf("read %d articles in %s\n", len(res), time.Since(timeStart))
	return res, dirs, nil
}
Example #10
func RewriteImports(path string, rw func(string) string, filter func(string) bool) error {
	w := fs.Walk(path)
	for w.Step() {
		rel := w.Path()[len(path):]
		if len(rel) == 0 {
			continue
		}
		rel = rel[1:]

		if strings.HasPrefix(rel, ".git") || strings.HasPrefix(rel, "vendor") {
			w.SkipDir()
			continue
		}

		if !filter(rel) {
			continue
		}

		dir, fi := filepath.Split(w.Path())
		good, err := build.Default.MatchFile(dir, fi)
		if err != nil {
			return err
		}
		if !good {
			continue
		}

		err = rewriteImportsInFile(w.Path(), rw)
		if err != nil {
			fmt.Println("rewrite error: ", err)
			return err
		}
	}
	return nil
}
Example #11
func (r *Radio) StdPopulate() error {
	// Add a dummy manual playlist
	r.NewPlaylist("manual")

	// Add local directory
	playlistsDirs := []string{"/playlists"}
	playlistsDirs = append(playlistsDirs, path.Join(os.Getenv("HOME"), "playlists"))
	playlistsDirs = append(playlistsDirs, path.Join("/home", "playlists"))
	dir, err := os.Getwd()
	if err == nil && os.Getenv("NO_LOCAL_PLAYLISTS") != "1" {
		r.NewDirectoryPlaylist("local directory", dir)
		playlistsDirs = append(playlistsDirs, path.Join(dir, "playlists"))
	}

	// Add each folder in '/playlists' and './playlists'
	for _, playlistsDir := range playlistsDirs {
		walker := fs.Walk(playlistsDir)
		for walker.Step() {
			if walker.Path() == playlistsDir {
				continue
			}
			if err := walker.Err(); err != nil {
				logrus.Warnf("walker error: %v", err)
				continue
			}

			var realpath string
			if walker.Stat().IsDir() {
				realpath = walker.Path()
				walker.SkipDir()
			} else {
				realpath, err = filepath.EvalSymlinks(walker.Path())
				if err != nil {
					logrus.Warnf("filepath.EvalSymlinks error for %q: %v", walker.Path(), err)
					continue
				}
			}

			stat, err := os.Stat(realpath)
			if err != nil {
				logrus.Warnf("os.Stat error: %v", err)
				continue
			}
			if stat.IsDir() {
				r.NewDirectoryPlaylist(fmt.Sprintf("playlist: %s", walker.Stat().Name()), realpath)
			}
		}
	}

	// Add 'standard' music paths
	r.NewDirectoryPlaylist("iTunes Music", "~/Music/iTunes/iTunes Media/Music/")
	r.NewDirectoryPlaylist("iTunes Podcasts", "~/Music/iTunes/iTunes Media/Podcasts/")
	r.NewDirectoryPlaylist("iTunes Music", "/home/Music/iTunes/iTunes Media/Music/")
	r.NewDirectoryPlaylist("iTunes Podcasts", "/home/Music/iTunes/iTunes Media/Podcasts/")

	return nil
}
Example #12
func ExampleWalker() {
	walker := fs.Walk("/usr/lib")
	for walker.Step() {
		if err := walker.Err(); err != nil {
			fmt.Fprintln(os.Stderr, err)
			continue
		}
		fmt.Println(walker.Path())
	}
}
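Most of the other examples on this page prune subtrees with SkipDir. A minimal, hypothetical variant of the example above showing that pattern; it assumes the same Walker API (Step/Err/Stat/SkipDir/Path) used throughout, e.g. github.com/kr/fs, plus the strings package:

// Hypothetical variant of ExampleWalker that prunes hidden directories.
func ExampleWalker_skipHidden() {
	walker := fs.Walk("/usr/lib")
	for walker.Step() {
		if err := walker.Err(); err != nil {
			fmt.Fprintln(os.Stderr, err)
			continue
		}
		if walker.Stat().IsDir() && strings.HasPrefix(walker.Stat().Name(), ".") {
			walker.SkipDir() // do not descend into hidden directories
			continue
		}
		fmt.Println(walker.Path())
	}
}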
Example #13
// pythonSourceFiles returns all Python source files under dir that are
// not already recorded in discoveredScripts.
func pythonSourceFiles(dir string, discoveredScripts map[string]bool) (files []string) {
	walker := fs.Walk(dir)
	for walker.Step() {
		if err := walker.Err(); err == nil && !walker.Stat().IsDir() && filepath.Ext(walker.Path()) == ".py" {
			file := walker.Path()
			_, found := discoveredScripts[file]

			if !found {
				files = append(files, filepath.ToSlash(file))
				discoveredScripts[file] = true
			}
		}
	}
	return
}
Example #14
// rewriteTree recursively visits the Go files in path, rewriting
// import statements according to the rules for func qualify.
func rewriteTree(path, qual string, paths []string) error {
	w := fs.Walk(path)
	for w.Step() {
		if w.Err() != nil {
			log.Println("rewrite:", w.Err())
			continue
		}
		if !w.Stat().IsDir() && strings.HasSuffix(w.Path(), ".go") {
			err := rewriteGoFile(w.Path(), qual, paths)
			if err != nil {
				return err
			}
		}
	}
	return nil
}
Example #15
func (p *Playlist) AutoUpdate() error {
	if p.Path == "" {
		logrus.Debugf("Playlist %q is not dynamic, skipping update", p.Name)
		return nil
	}

	// if we are here, the playlist is based on local file system
	logrus.Infof("Updating playlist %q", p.Name)

	p.Status = "updating"

	walker := fs.Walk(p.Path)

	for walker.Step() {
		if err := walker.Err(); err != nil {
			logrus.Warnf("walker error: %v", err)
			continue
		}
		stat := walker.Stat()

		if stat.IsDir() {
			switch stat.Name() {
			case ".git", "bower_components":
				walker.SkipDir()
			}
		} else {
			switch stat.Name() {
			case ".DS_Store":
				continue
			}

			p.NewLocalTrack(walker.Path())
		}
	}

	logrus.Infof("Playlist %q updated, %d tracks", p.Name, len(p.Tracks))
	if p.Stats.Tracks > 0 {
		p.Status = "ready"
	} else {
		p.Status = "empty"
	}
	p.ModificationDate = time.Now()

	return nil
}
Example #16
func (f *Finder) List(ignorePatterns []string) []string {
	fileList := []string{}

	for _, location := range f.locations {
		walker := fs.Walk(location)

		for walker.Step() {
			err := walker.Err()

			if err != nil || filenameMatchPatterns(walker.Path(), ignorePatterns) {
				continue
			}

			fileList = append(fileList, walker.Path())
		}
	}

	return fileList
}
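filenameMatchPatterns is not defined in this example. A hypothetical version, assuming the ignore patterns are filepath.Match-style globs applied to the base name:

// Hypothetical helper matching the call above: reports whether the file's
// base name matches any of the ignore patterns. Glob-style matching is an
// assumption; the original helper is not shown.
func filenameMatchPatterns(path string, patterns []string) bool {
	name := filepath.Base(path)
	for _, p := range patterns {
		if ok, err := filepath.Match(p, name); err == nil && ok {
			return true
		}
	}
	return false
}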
Example #17
func main() {
	var g regexp.Grep
	g.AddFlags()
	g.Stdout = os.Stdout
	g.Stderr = os.Stderr
	flag.Usage = usage
	flag.Parse()
	args := flag.Args()
	if len(args) == 0 {
		flag.Usage()
	}

	pat := "(?m)" + strings.Join(args, ".*")
	if *iflag {
		pat = "(?i)" + pat
	}
	re, err := regexp.Compile(pat)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	g.Regexp = re
	walker := fs.Walk(".")
	for walker.Step() {
		if walker.Stat().IsDir() {
			if strings.Contains(walker.Path(), ".git") {
				walker.SkipDir()
			}
			continue
		}
		g.File(walker.Path())
	}
	if err := walker.Err(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	if !g.Match {
		os.Exit(1)
	}
}
Example #18
func visit(filename string) error {
	walker := fs.Walk(filename)
	for walker.Step() {
		if err := walker.Err(); err != nil {
			fmt.Fprintln(os.Stderr, err)
			continue
		}
		path := walker.Path()
		info := walker.Stat()
		if info.IsDir() {
			if isHidden(path) {
				walker.SkipDir()
			}
		}
		if info.Mode()&os.ModeSymlink == os.ModeSymlink {
			walker.SkipDir()
		}
		if info.Mode().IsRegular() {
			wg.Add(1)
			go parseFile(path)
		}
	}
	return nil
}
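parseFile and the package-level wg are assumed by this example. A hypothetical skeleton that at least keeps the WaitGroup accounting correct; the parsing work itself is elided:

// Hypothetical parseFile matching the goroutine launched above: whatever it
// does, it must call wg.Done exactly once so callers can Wait on wg.
func parseFile(path string) {
	defer wg.Done()
	// ... open path and parse its contents ...
}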
Example #19
func copySrc(dir string, deps []Dependency) error {
	// mapping to see if we visited a parent directory already
	visited := make(map[string]bool)
	ok := true
	for _, dep := range deps {
		debugln("copySrc for", dep.ImportPath)
		srcdir := filepath.Join(dep.ws, "src")
		rel, err := filepath.Rel(srcdir, dep.dir)
		debugln("srcdir", srcdir)
		debugln("rel", rel)
		debugln("err", err)
		if err != nil { // this should never happen
			return err
		}
		dstpkgroot := filepath.Join(dir, rel)
		err = os.RemoveAll(dstpkgroot)
		if err != nil {
			log.Println(err)
			ok = false
		}

		// copy actual dependency
		vf := dep.vcs.listFiles(dep.dir)
		debugln("vf", vf)
		w := fs.Walk(dep.dir)
		for w.Step() {
			err = copyPkgFile(vf, dir, srcdir, w)
			if err != nil {
				log.Println(err)
				ok = false
			}
		}

		// Look for legal files in the root.
		// Some packages are imported as a sub-package, but the license info
		// is at the root: exampleorg/common has its license file in exampleorg.
		//
		if dep.ImportPath == dep.root {
			// we are already at root
			continue
		}

		// Prevent copying twice. This could happen if we have
		// two subpackages listed, someorg/common and
		// someorg/anotherpack, which have their license in
		// the parent dir of someorg.
		rootdir := filepath.Join(srcdir, filepath.FromSlash(dep.root))
		if visited[rootdir] {
			continue
		}
		visited[rootdir] = true
		vf = dep.vcs.listFiles(rootdir)
		w = fs.Walk(rootdir)
		for w.Step() {
			fname := filepath.Base(w.Path())
			if IsLegalFile(fname) && !strings.Contains(w.Path(), sep) {
				err = copyPkgFile(vf, dir, srcdir, w)
				if err != nil {
					log.Println(err)
					ok = false
				}
			}
		}
	}

	if !ok {
		return errorCopyingSourceCode
	}

	return nil
}
Example #20
// CrawlROMs crawls the rom directory and processes the files.
func CrawlROMs(gl *rom.GameListXML, sources []ds.DS, xmlOpts *rom.XMLOpts, gameOpts *rom.GameOpts) error {
	var ct http.RoundTripper = NewCancelTransport(http.DefaultTransport.(*http.Transport))
	http.DefaultClient.Transport = ct

	existing := make(map[string]struct{})

	if !dirExists(xmlOpts.RomDir) {
		log.Printf("ERR %s: does not exists", xmlOpts.RomDir)
		return nil
	}

	extraMap := make(map[string]struct{})
	if *extraExt != "" {
		extraSlice := strings.Split(*extraExt, ",")
		for _, e := range extraSlice {
			if e[0] != '.' {
				extraMap["."+e] = struct{}{}
			} else {
				extraMap[e] = struct{}{}
			}
		}
	}

	for _, x := range gl.GameList {
		switch {
		case *appendOut:
			p, err := filepath.Rel(xmlOpts.RomXMLDir, x.Path)
			if err != nil {
				log.Printf("Can't find original path: %s", x.Path)
			}
			f := filepath.Join(xmlOpts.RomDir, p)
			existing[f] = struct{}{}
		case *refreshOut:
			existing[x.Path] = struct{}{}
		}
	}

	var wg sync.WaitGroup
	results := make(chan *rom.GameXML, *workers)
	roms := make(chan *rom.ROM, 2**workers)
	for i := 0; i < *workers; i++ {
		wg.Add(1)
		go worker(sources, xmlOpts, gameOpts, results, roms, &wg)
	}
	go func() {
		defer wg.Done()
		for r := range results {
			if _, ok := existing[r.Path]; ok && *refreshOut {
				for i, g := range gl.GameList {
					if g.Path != r.Path {
						continue
					}
					copy(gl.GameList[i:], gl.GameList[i+1:])
					gl.GameList = gl.GameList[:len(gl.GameList)-1]
				}
			}
			gl.Append(r)
		}
	}()
	var stop bool
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, os.Interrupt)
	defer signal.Stop(sig)
	go func() {
		for {
			<-sig
			if !stop {
				stop = true
				log.Println("Stopping, ctrl-c again to stop now.")
				ct.(*CancelTransport).Stop()
				for range roms {
				}
				continue
			}
			panic("AHHHH!")
		}
	}()
	bins := make(map[string]struct{})
	if !*mame {
		walker := fs.Walk(xmlOpts.RomDir)
		for walker.Step() {
			if stop {
				break
			}
			if err := walker.Err(); err != nil {
				return err
			}
			f := walker.Path()
			if b := filepath.Base(f); b != "." && strings.HasPrefix(b, ".") {
				walker.SkipDir()
				continue
			}
			r, err := rom.NewROM(f)
			if err != nil {
				log.Printf("ERR: Processing: %s, %s", f, err)
				continue
			}
			if !r.Cue {
				continue
			}
			for _, b := range r.Bins {
				bins[b] = struct{}{}
			}
			bins[f] = struct{}{}
			if _, ok := existing[f]; !*refreshOut && ok {
				log.Printf("INFO: Skipping %s, already in gamelist.", f)
				continue
			}
			roms <- r
		}
	}
	walker := fs.Walk(xmlOpts.RomDir)
	for walker.Step() {
		if stop {
			break
		}
		if err := walker.Err(); err != nil {
			return err
		}
		f := walker.Path()
		if b := filepath.Base(f); b != "." && strings.HasPrefix(b, ".") {
			walker.SkipDir()
			continue
		}
		if _, ok := existing[f]; !*refreshOut && ok {
			log.Printf("INFO: Skipping %s, already in gamelist.", f)
			continue
		}
		r, err := rom.NewROM(f)
		if err != nil {
			log.Printf("ERR: Processing: %s, %s", f, err)
			continue
		}
		_, isExtra := extraMap[r.Ext]
		if *mame {
			if r.Ext == ".zip" || r.Ext == ".7z" || isExtra {
				roms <- r
			}
			continue
		}
		_, ok := bins[f]
		if !ok && (rh.KnownExt(r.Ext) || r.Ext == ".svm" || isExtra) {
			roms <- r
		}
	}
	close(roms)
	wg.Wait()
	wg.Add(1)
	close(results)
	wg.Wait()
	if stop {
		return UserCanceled
	} else {
		return nil
	}
}
Example #21
// CrawlROMs crawls the rom directory and processes the files.
func CrawlROMs(gl *rom.GameListXML, sources []ds.DS, xmlOpts *rom.XMLOpts, gameOpts *rom.GameOpts) error {
	var missingCSV *csv.Writer
	var gdbDS *ds.GDB
	if *missing != "" {
		f, err := os.Create(*missing)
		if err != nil {
			return err
		}
		missingCSV = csv.NewWriter(f)
		defer func() {
			missingCSV.Flush()
			if err := missingCSV.Error(); err != nil {
				log.Fatal(err)
			}
			f.Close()
		}()
		if err := missingCSV.Write([]string{"Game", "Error", "Hash", "Extra"}); err != nil {
			return err
		}
		for _, d := range sources {
			switch d := d.(type) {
			case *ds.GDB:
				gdbDS = d
			}
		}
	}
	var ct http.RoundTripper = NewCancelTransport(http.DefaultTransport.(*http.Transport))
	http.DefaultClient.Transport = ct

	existing := make(map[string]struct{})

	if !dirExists(xmlOpts.RomDir) {
		log.Printf("ERR %s: does not exists", xmlOpts.RomDir)
		return nil
	}

	extraMap := make(map[string]struct{})
	if *extraExt != "" {
		extraSlice := strings.Split(*extraExt, ",")
		for _, e := range extraSlice {
			if e[0] != '.' {
				extraMap["."+e] = struct{}{}
			} else {
				extraMap[e] = struct{}{}
			}
		}
	}

	for _, x := range gl.GameList {
		switch {
		case *appendOut:
			p, err := filepath.Rel(xmlOpts.RomXMLDir, x.Path)
			if err != nil {
				log.Printf("Can't find original path: %s", x.Path)
			}
			f := filepath.Join(xmlOpts.RomDir, p)
			existing[f] = struct{}{}
		case *refreshOut:
			existing[x.Path] = struct{}{}
		}
	}

	var wg sync.WaitGroup
	results := make(chan Result, *workers)
	roms := make(chan *rom.ROM, 2**workers)
	for i := 0; i < *workers; i++ {
		wg.Add(1)
		go worker(sources, xmlOpts, gameOpts, results, roms, &wg)
	}
	go func() {
		defer wg.Done()
		for r := range results {
			if r.XML == nil {
				if *missing == "" {
					continue
				}
				files := []string{r.ROM.Path}
				if r.ROM.Cue {
					files = append(files, r.ROM.Bins...)
				}
				for _, file := range files {
					var hash, extra string
					if gdbDS != nil {
						var err error
						hash, err = gdbDS.Hash(file)
						if err != nil {
							log.Printf("ERR: Can't hash file %s", file)
						}
						name := gdbDS.GetName(file)
						if name != "" && r.Err == ds.NotFoundErr {
							extra = "hash found but no GDB ID"
						}
					}
					if err := missingCSV.Write([]string{file, r.Err.Error(), hash, extra}); err != nil {
						log.Printf("ERR: Can't write to %s", *missing)
					}
				}
				continue
			}
			if r.XML.Image == "" && *missing != "" {
				var hash string
				if gdbDS != nil {
					var err error
					hash, err = gdbDS.Hash(r.ROM.Path)
					if err != nil {
						log.Printf("ERR: Can't hash file %s", r.ROM.Path)
					}
				}
				if err := missingCSV.Write([]string{r.ROM.FileName, "", hash, "missing image"}); err != nil {
					log.Printf("ERR: Can't write to %s", *missing)
				}
			}
			if _, ok := existing[r.XML.Path]; ok && *refreshOut {
				for i, g := range gl.GameList {
					if g.Path != r.XML.Path {
						continue
					}
					copy(gl.GameList[i:], gl.GameList[i+1:])
					gl.GameList = gl.GameList[:len(gl.GameList)-1]
				}
			}
			gl.Append(r.XML)
		}
	}()
	var stop bool
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, os.Interrupt)
	defer signal.Stop(sig)
	go func() {
		for {
			<-sig
			if !stop {
				stop = true
				log.Println("Stopping, ctrl-c again to stop now.")
				ct.(*CancelTransport).Stop()
				for range roms {
				}
				continue
			}
			panic("AHHHH!")
		}
	}()
	bins := make(map[string]struct{})
	if !*mame {
		walker := fs.Walk(xmlOpts.RomDir)
		for walker.Step() {
			if stop {
				break
			}
			if err := walker.Err(); err != nil {
				return err
			}
			f := walker.Path()
			if b := filepath.Base(f); b != "." && strings.HasPrefix(b, ".") {
				walker.SkipDir()
				continue
			}
			r, err := rom.NewROM(f)
			if err != nil {
				log.Printf("ERR: Processing: %s, %s", f, err)
				continue
			}
			if !r.Cue {
				continue
			}
			for _, b := range r.Bins {
				bins[b] = struct{}{}
			}
			bins[f] = struct{}{}
			if _, ok := existing[f]; !*refreshOut && ok {
				log.Printf("INFO: Skipping %s, already in gamelist.", f)
				continue
			}
			roms <- r
		}
	}
	walker := fs.Walk(xmlOpts.RomDir)
	for walker.Step() {
		if stop {
			break
		}
		if err := walker.Err(); err != nil {
			return err
		}
		f := walker.Path()
		if b := filepath.Base(f); b != "." && strings.HasPrefix(b, ".") {
			walker.SkipDir()
			continue
		}
		if _, ok := existing[f]; !*refreshOut && ok {
			log.Printf("INFO: Skipping %s, already in gamelist.", f)
			continue
		}
		r, err := rom.NewROM(f)
		if err != nil {
			log.Printf("ERR: Processing: %s, %s", f, err)
			continue
		}
		_, isExtra := extraMap[r.Ext]
		if *mame {
			if r.Ext == ".zip" || r.Ext == ".7z" || isExtra {
				roms <- r
			}
			continue
		}
		_, ok := bins[f]
		if !ok && (rh.KnownExt(r.Ext) || r.Ext == ".svm" || isExtra) {
			roms <- r
		}
	}
	close(roms)
	wg.Wait()
	wg.Add(1)
	close(results)
	wg.Wait()
	if stop {
		return UserCanceled
	} else {
		return nil
	}
}
Example #22
func Scan(dir string) ([]string, error) {

	fileInfo, err := os.Stat(dir)
	if err != nil {
		return nil, err
	}

	if !fileInfo.IsDir() {
		return nil, fmt.Errorf("'%s' is a file, directories contain packages", fileInfo.Name())
	}

	imports := map[string]bool{}

	w := fs.Walk(dir)
	w.Step() // consume the root directory entry itself; otherwise the IsDir branch below would SkipDir the entire walk

	for w.Step() {

		fstat := w.Stat()

		if fstat.IsDir() {
			w.SkipDir()
			continue
		}

		// check for errors
		if w.Err() != nil {
			return nil, w.Err()
		}

		fpath := w.Path()

		// check the file is a .go file
		if strings.HasSuffix(fpath, ".go") {

			fset := token.NewFileSet()

			// parse only the import declarations in the .go file
			f, err := parser.ParseFile(fset, w.Path(), nil, parser.ImportsOnly)
			if err != nil {

				e := err.Error()
				if strings.Contains(e, eofError) {
					continue
				}

				if strings.Contains(e, importPathError) {
					for _, i := range f.Imports {
						if Valid(i.Path.Value) {
							path, err := strconv.Unquote(i.Path.Value)
							if err != nil {
								return nil, err
							}
							imports[path] = true
							continue
						}
					}
					continue
				}

				return nil, err
			}

			// unquote the import path value
			for _, i := range f.Imports {
				path, err := strconv.Unquote(i.Path.Value)
				if err != nil {
					return nil, err
				}
				imports[path] = true
			}
		}
	}

	for path := range imports {
		for _, exception := range Exceptions {
			if Match(path, exception) {
				imports[path] = false
			}
		}
	}

	paths := []string{}
	for path, ok := range imports {
		if ok {
			paths = append(paths, path)
		}
	}

	return paths, nil
}
Example #23
func compareDirectoriesRecursive(t *testing.T, aroot, broot string) {
	walker := fs.Walk(aroot)
	for walker.Step() {
		if err := walker.Err(); err != nil {
			t.Fatal(err)
		}
		// find paths
		aPath := walker.Path()
		aRel, err := filepath.Rel(aroot, aPath)
		if err != nil {
			t.Fatalf("could not find relative path for %v: %v", aPath, err)
		}
		bPath := path.Join(broot, aRel)

		if aRel == "." {
			continue
		}

		//t.Logf("comparing: %v a: %v b %v", aRel, aPath, bPath)

		// if a is a link, the sftp recursive copy won't have copied it. ignore
		aLink, err := os.Lstat(aPath)
		if err != nil {
			t.Fatalf("could not lstat %v: %v", aPath, err)
		}
		if aLink.Mode()&os.ModeSymlink != 0 {
			continue
		}

		// stat the files
		aFile, err := os.Stat(aPath)
		if err != nil {
			t.Fatalf("could not stat %v: %v", aPath, err)
		}
		bFile, err := os.Stat(bPath)
		if err != nil {
			t.Fatalf("could not stat %v: %v", bPath, err)
		}

		// compare stats, with some leniency for the timestamp
		if aFile.Mode() != bFile.Mode() {
			t.Fatalf("modes different for %v: %v vs %v", aRel, aFile.Mode(), bFile.Mode())
		}
		if !aFile.IsDir() {
			if aFile.Size() != bFile.Size() {
				t.Fatalf("sizes different for %v: %v vs %v", aRel, aFile.Size(), bFile.Size())
			}
		}
		timeDiff := aFile.ModTime().Sub(bFile.ModTime())
		if timeDiff > time.Second || timeDiff < -time.Second {
			t.Fatalf("mtimes different for %v: %v vs %v", aRel, aFile.ModTime(), bFile.ModTime())
		}

		// compare contents
		if !aFile.IsDir() {
			if aContents, err := ioutil.ReadFile(aPath); err != nil {
				t.Fatal(err)
			} else if bContents, err := ioutil.ReadFile(bPath); err != nil {
				t.Fatal(err)
			} else if string(aContents) != string(bContents) {
				t.Fatalf("contents different for %v", aRel)
			}
		}
	}
}
Example #24
// List finds all toolchains in the SRCLIBPATH.
//
// List does not find nested toolchains; i.e., if DIR is a toolchain
// dir (with a DIR/Srclibtoolchain file), then none of DIR's
// subdirectories are searched for toolchains.
func List() ([]*Info, error) {
	if noToolchains {
		return nil, nil
	}

	var found []*Info
	seen := map[string]string{}

	dirs := filepath.SplitList(srclib.Path)

	// maps symlinked trees to their original path
	origDirs := map[string]string{}

	for i := 0; i < len(dirs); i++ {
		dir := dirs[i]
		if dir == "" {
			dir = "."
		}
		w := fs.Walk(dir)
		for w.Step() {
			if w.Err() != nil {
				return nil, w.Err()
			}
			fi := w.Stat()
			name := fi.Name()
			path := w.Path()
			if path != dir && (name[0] == '.' || name[0] == '_') {
				w.SkipDir()
			} else if fi.Mode()&os.ModeSymlink != 0 {
				// Check if symlink points to a directory.
				if sfi, err := os.Stat(path); err == nil {
					if !sfi.IsDir() {
						continue
					}
				} else if os.IsNotExist(err) {
					continue
				} else {
					return nil, err
				}

				// traverse symlinks but refer to symlinked trees' toolchains using
				// the path to them through the original entry in SRCLIBPATH
				dirs = append(dirs, path+string(filepath.Separator))
				origDirs[path+string(filepath.Separator)] = dir
			} else if fi.Mode().IsDir() {
				// Check for Srclibtoolchain file in this dir.

				if _, err := os.Stat(filepath.Join(path, ConfigFilename)); os.IsNotExist(err) {
					continue
				} else if err != nil {
					return nil, err
				}

				// Found a Srclibtoolchain file.
				path = filepath.Clean(path)

				var base string
				if orig, present := origDirs[dir]; present {
					base = orig
				} else {
					base = dir
				}

				toolchainPath, _ := filepath.Rel(base, path)

				if otherDir, seen := seen[toolchainPath]; seen {
					return nil, fmt.Errorf("saw 2 toolchains at path %s in dirs %s and %s", toolchainPath, otherDir, path)
				}
				seen[toolchainPath] = path

				info, err := newInfo(toolchainPath, path, ConfigFilename)
				if err != nil {
					return nil, err
				}
				found = append(found, info)

				// Disallow nested toolchains to speed up List. This
				// means that if DIR/Srclibtoolchain exists, no other
				// Srclibtoolchain files underneath DIR will be read.
				w.SkipDir()
			}
		}
	}
	return found, nil
}
Example #25
func CompareTree(basepath string, remoteMap map[string]file, skipFiles, skipDirs []*regexp.Regexp, appendFile bool) (rmdirs []file, rms []file, mkdirs []file, puts []file) {
	walker := fs.Walk(basepath)

	for walker.Step() {
		if err := walker.Err(); err != nil {
			log.Warn("walker error", "err", err)
			continue
		}

		rel, err := filepath.Rel(basepath, walker.Path())
		if err != nil {
			log.Warn("rel error", "err", err)
			continue
		}

		if rel == "." {
			continue
		}

		name := filepath.Base(walker.Path())
		if walker.Stat().IsDir() {
			matched := false
			for _, skipDir := range skipDirs {
				if skipDir.MatchString(name) {
					walker.SkipDir()
					matched = true
					break
				}
			}
			if matched {
				continue
			}
		} else {
			matched := false
			for _, skipFile := range skipFiles {
				if skipFile.MatchString(name) {
					matched = true
					break
				}
			}
			if matched {
				continue
			}
		}

		stat := walker.Stat()
		mine := file{
			mode:    stat.Mode(),
			size:    stat.Size(),
			mod:     stat.ModTime(),
			path:    walker.Path(),
			relPath: rel,
		}

		if remote, ok := remoteMap[mine.relPath]; ok {
			if !mine.mode.IsDir() && !remote.mode.IsDir() {
				if mine.mod.After(remote.mod) {
					puts = append(puts, mine)
				} else if mine.size != remote.size {
					if remote.size < mine.size && appendFile {
						mine.offset = remote.size
					}
					puts = append(puts, mine)
				}
			} else if !mine.mode.IsDir() && remote.mode.IsDir() {
				rmdirs = append(rmdirs, remote)
				puts = append(puts, mine)
			} else if mine.mode.IsDir() && !remote.mode.IsDir() {
				rms = append(rms, remote)
			}
			delete(remoteMap, remote.relPath)
		} else {
			if mine.mode.IsDir() {
				mkdirs = append(mkdirs, mine)
			} else {
				puts = append(puts, mine)
			}
		}
	}

	for _, remote := range remoteMap {
		if remote.mode.IsDir() {
			rmdirs = append(rmdirs, remote)
		} else {
			rms = append(rms, remote)
		}
	}

	sort.Sort(fileByPath(mkdirs))
	sort.Sort(fileByPath(puts))
	sort.Sort(sort.Reverse(fileByPath(rmdirs)))
	sort.Sort(sort.Reverse(fileByPath(rms)))

	return rmdirs, rms, mkdirs, puts
}
Example #26
// CrawlROMs crawls the rom directory and processes the files.
func CrawlROMs(gl *rom.GameListXML, sources []ds.DS, xmlOpts *rom.XMLOpts, gameOpts *rom.GameOpts) error {
	var ct http.RoundTripper = NewCancelTransport(http.DefaultTransport.(*http.Transport))
	http.DefaultClient.Transport = ct

	existing := make(map[string]struct{})

	for _, x := range gl.GameList {
		p, err := filepath.Rel(xmlOpts.RomXMLDir, x.Path)
		if err != nil {
			log.Printf("Can't find original path: %s", x.Path)
		}
		f := filepath.Join(xmlOpts.RomDir, p)
		existing[f] = struct{}{}
	}

	var wg sync.WaitGroup
	results := make(chan *rom.GameXML, *workers)
	roms := make(chan *rom.ROM, 2**workers)
	for i := 0; i < *workers; i++ {
		wg.Add(1)
		go worker(sources, xmlOpts, gameOpts, results, roms, &wg)
	}
	go func() {
		defer wg.Done()
		for r := range results {
			gl.Append(r)
		}
	}()
	var stop bool
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, os.Interrupt)
	defer signal.Stop(sig)
	go func() {
		for {
			<-sig
			if !stop {
				stop = true
				log.Println("Stopping, ctrl-c again to stop now.")
				ct.(*CancelTransport).Stop()
				for range roms {
				}
				continue
			}
			panic("AHHHH!")
		}
	}()
	bins := make(map[string]struct{})
	if !*mame {
		walker := fs.Walk(xmlOpts.RomDir)
		for walker.Step() {
			if stop {
				break
			}
			if err := walker.Err(); err != nil {
				return err
			}
			f := walker.Path()
			r, err := rom.NewROM(f)
			if err != nil {
				log.Printf("ERR: Processing: %s, %s", f, err)
				continue
			}
			if !r.Cue {
				continue
			}
			for _, b := range r.Bins {
				bins[b] = struct{}{}
			}
			bins[f] = struct{}{}
			if _, ok := existing[f]; ok {
				log.Printf("INFO: Skipping %s, already in gamelist.", f)
				continue
			}
			roms <- r
		}
	}
	walker := fs.Walk(xmlOpts.RomDir)
	for walker.Step() {
		if stop {
			break
		}
		if err := walker.Err(); err != nil {
			return err
		}
		f := walker.Path()
		if _, ok := existing[f]; ok {
			log.Printf("INFO: Skipping %s, already in gamelist.", f)
			continue
		}
		r, err := rom.NewROM(f)
		if err != nil {
			log.Printf("ERR: Processing: %s, %s", f, err)
			continue
		}
		if *mame {
			if r.Ext == ".zip" || r.Ext == ".7z" {
				roms <- r
			}
			continue
		}
		_, ok := bins[f]
		if !ok && rh.KnownExt(r.Ext) {
			roms <- r
		}
	}
	close(roms)
	wg.Wait()
	wg.Add(1)
	close(results)
	wg.Wait()
	if stop {
		return UserCanceled
	} else {
		return nil
	}
}
Example #27
func TestWalk(t *testing.T) {
	makeTree(t)
	errors := make([]error, 0, 10)
	clear := true
	markFn := func(walker *fs.Walker) (err error) {
		for walker.Step() {
			err = mark(walker.Path(), walker.Stat(), walker.Err(), &errors, clear)
			if err != nil {
				break
			}
		}
		return err
	}
	// Expect no errors.
	err := markFn(fs.Walk(tree.name))
	if err != nil {
		t.Fatalf("no error expected, found: %s", err)
	}
	if len(errors) != 0 {
		t.Fatalf("unexpected errors: %s", errors)
	}
	checkMarks(t, true)
	errors = errors[0:0]

	// Test permission errors.  Only possible if we're not root
	// and only on some file systems (AFS, FAT).  To avoid errors during
	// all.bash on those file systems, skip during go test -short.
	if os.Getuid() > 0 && !testing.Short() {
		// introduce 2 errors: chmod top-level directories to 0
		os.Chmod(filepath.Join(tree.name, tree.entries[1].name), 0)
		os.Chmod(filepath.Join(tree.name, tree.entries[3].name), 0)

		// 3) capture errors, expect two.
		// mark respective subtrees manually
		markTree(tree.entries[1])
		markTree(tree.entries[3])
		// correct double-marking of directory itself
		tree.entries[1].mark--
		tree.entries[3].mark--
		err := markFn(fs.Walk(tree.name))
		if err != nil {
			t.Fatalf("expected no error return from Walk, got %s", err)
		}
		if len(errors) != 2 {
			t.Errorf("expected 2 errors, got %d: %s", len(errors), errors)
		}
		// the inaccessible subtrees were marked manually
		checkMarks(t, true)
		errors = errors[0:0]

		// 4) capture errors, stop after first error.
		// mark respective subtrees manually
		markTree(tree.entries[1])
		markTree(tree.entries[3])
		// correct double-marking of directory itself
		tree.entries[1].mark--
		tree.entries[3].mark--
		clear = false // error will stop processing
		err = markFn(fs.Walk(tree.name))
		if err == nil {
			t.Fatalf("expected error return from Walk")
		}
		if len(errors) != 1 {
			t.Errorf("expected 1 error, got %d: %s", len(errors), errors)
		}
		// the inaccessible subtrees were marked manually
		checkMarks(t, false)
		errors = errors[0:0]

		// restore permissions
		os.Chmod(filepath.Join(tree.name, tree.entries[1].name), 0770)
		os.Chmod(filepath.Join(tree.name, tree.entries[3].name), 0770)
	}

	// cleanup
	if err := os.RemoveAll(tree.name); err != nil {
		t.Errorf("removeTree: %v", err)
	}
}
Example #28
func main() {

	fn := "CDNUploader v0.1"

	cloudAPI, err := googles.Login()
	if err != nil {
		panic(err)
	}

	fileIndex := map[string]string{}
	indexBytes, err := ioutil.ReadFile(CDN_PROGRESS)
	if err == nil {
		if !theta.UnmarshalJSON(fn, indexBytes, &fileIndex) {
			fmt.Println("FAILED TO LOAD EXISTING PROGRESS FILE:", CDN_PROGRESS)
			fileIndex = map[string]string{}
		}
	} else {
		fmt.Println("CANNOT FIND EXISTING PROGRESS FILE:", CDN_PROGRESS)
	}

	walker := fs.Walk(".")
	for walker.Step() {

		if err := walker.Err(); err != nil {
			fmt.Println(err)
			continue
		}

		if walker.Stat().IsDir() {
			continue
		}

		objectName := walker.Path()

		if objectName == "cdn.go" || objectName == "cdn.progress" || string(objectName[0]) == "." {
			continue
		}

		// write to google cloud

		b, err := ioutil.ReadFile(objectName)
		if err != nil {
			panic(err)
		}

		objectHash := theta.SHA1b(b)

		if fileIndex[objectName] == objectHash {
			fmt.Println(objectHash, "OK")
			continue
		}

		// Only slice out an extension when a dot was found; LastIndex returns -1 otherwise.
		n := strings.LastIndex(objectName, ".") + 1
		var ct string
		if n > 0 {
			ct = objectName[n:]
		}

		var contentType string
		switch ct {

		case "css":
			contentType = "text/css"
		case "js":
			contentType = "application/javascript"

		default:

			contentType = mime.TypeByExtension(objectName)
			if len(contentType) == 0 || contentType == "application/octet-stream" {
				contentType = http.DetectContentType(b)
			}

		}

		err = cloudAPI.StorageWriter(true, contentType, "libs.leadinglocally.com", objectName, b)
		if err != nil {
			panic(err)
		}

		fileIndex[objectName] = objectHash

		fmt.Println(contentType+" UPLOADED:", objectName)
	}

	ok, b := theta.MarshalJSON(fn, fileIndex)
	if ok {
		err := ioutil.WriteFile(CDN_PROGRESS, b, 0777)
		if err != nil {
			panic(err)
		}

		fmt.Println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^")
		fmt.Println("UPLOAD QUEUE COMPLETED...")
		fmt.Println("______________________________________________")
		return
	}

	fmt.Println("ERROR COMPLETING UPLOADS")
}
Example #29
func (c *LintCmd) Execute(args []string) error {
	if len(c.Args.Paths) == 0 {
		c.Args.Paths = []string{"."}
	}

	issuec := make(chan string)
	quitc := make(chan struct{})
	go func() {
		for {
			select {
			case issue := <-issuec:
				fmt.Println(issue)
			case <-quitc:
				return
			}
		}
	}()

	lrepo, lrepoErr := openLocalRepo()
	if lrepoErr != nil {
		log.Printf("warning: while opening current dir's repo: %s", lrepoErr)
	}

	var wg sync.WaitGroup
	for _, path := range c.Args.Paths {
		w := fs.Walk(path)
		for w.Step() {
			if err := w.Err(); err != nil {
				return err
			}
			if fi := w.Stat(); fi.Mode().IsRegular() {
				suffix, typ := buildstore.DataType(fi.Name())
				if suffix != "" {
					absPath, err := filepath.Abs(w.Path())
					if err != nil {
						return err
					}

					var unitType, unitName string
					if !c.NoCheckResolve {
						if !strings.Contains(absPath, buildstore.BuildDataDirName) {
							return fmt.Errorf("couldn't infer which source unit %s corresponds to, because its absolute path is not under any %s dir; either run with --no-check-resolve to skip checking that internal refs resolve to valid defs (which requires knowing what source unit each output file is from), or run 'src lint' against .srclib-cache or subdirectories of it", w.Path(), buildstore.BuildDataDirName)
						}
						unitType = strings.TrimSuffix(fi.Name(), "."+suffix+".json")
						// Infer source unit name from file path (the
						// path components after .srclib-cache until
						// the basename).
						pcs := strings.Split(absPath, string(filepath.Separator))
						for i, pc := range pcs {
							if pc == buildstore.BuildDataDirName && len(pcs) > i+2 {
								unitName = filepath.Join(pcs[i+2 : len(pcs)-1]...)
								break
							}
						}
					}

					var commitID string
					if !c.NoCheckFiles {
						// Infer commit ID from file path (the path component after .srclib-cache).
						pcs := strings.Split(absPath, string(filepath.Separator))
						for i, pc := range pcs {
							if pc == buildstore.BuildDataDirName && len(pcs) > i+1 {
								commitID = pcs[i+1]
								break
							}
						}
					}
					if commitID == "" && !c.NoCheckFiles {
						return fmt.Errorf("couldn't infer which commit ID %s was built from, which is necessary to check that file/dir fields refer to actual files; either run with --no-check-files to skip the file/dir check or pass paths that contain '.../.srclib-cache/COMMITID/...' (which allows this command to infer the commit ID)", w.Path())
					}

					// Ensure that commitID matches the local repo's commit ID.
					if commitID != "" && !c.NoCheckFiles {
						if lrepo != nil && lrepo.CommitID != commitID {
							return fmt.Errorf("%s was built from commit %s, but the current repo working tree HEAD is commit %s; these must be the same to check that file/dir fields refer to actual files in the repo that their specific commit, so you must either (1) only run lint against build data files for commit %s; (2) run with --no-check-files to skip the file/dir check; or (3) check out commit %s in this repo", w.Path(), commitID, lrepo.CommitID, lrepo.CommitID, commitID)
						}
					}

					checkFilesExist := !c.NoCheckFiles

					wg.Add(1)
					go func(path string) {
						defer wg.Done()

						var issues []string
						var err error
						switch typ.(type) {
						case unit.SourceUnit:
							issues, err = lintSourceUnit(lrepo.RootDir, path, checkFilesExist)
						case *graph.Output:
							issues, err = lintGraphOutput(lrepo.RootDir, c.Repo, unitType, unitName, path, checkFilesExist)
						case []*dep.ResolvedDep:
							issues, err = lintDepresolveOutput(lrepo.RootDir, path, checkFilesExist)
						}
						for _, issue := range prependLabelToStrings(path, issues) {
							issuec <- issue
						}
						if err != nil {
							log.Fatalf(redbg("ERR")+" %s: %s", path, err)
						}
					}(w.Path())
				}
			}
		}
	}

	wg.Wait()
	close(quitc)

	return nil
}
Example #30
// ScanProject walks the project rooted at dir and returns the import paths
// of its external dependencies (standard-library and project-internal
// packages are filtered out).
func ScanProject(dir string) ([]string, error) {

	imports := map[string]bool{}

	w := fs.Walk(dir)
	for w.Step() {

		fstat := w.Stat()

		if fstat.IsDir() {

			// skip vendor, testdata, and directories whose name starts with '_'
			n := fstat.Name()
			if n == "vendor" || n == "testdata" || []rune(n)[0] == '_' {
				w.SkipDir()
				continue
			}
		}

		// check for errors
		if w.Err() != nil {
			return nil, w.Err()
		}

		fpath := w.Path()

		// check the file is a .go file
		if strings.HasSuffix(fpath, ".go") {

			fset := token.NewFileSet()

			// parse only the import declarations in the .go file
			f, err := parser.ParseFile(fset, w.Path(), nil, parser.ImportsOnly)
			if err != nil {

				e := err.Error()
				if strings.Contains(e, eofError) {
					continue
				}

				if strings.Contains(e, importPathError) {
					for _, i := range f.Imports {
						if Valid(i.Path.Value) {
							path, err := strconv.Unquote(i.Path.Value)
							if err != nil {
								return nil, err
							}
							imports[path] = true
							continue
						}
					}
					continue
				}

				return nil, err
			}

			// unquote the import path value
			for _, i := range f.Imports {
				path, err := strconv.Unquote(i.Path.Value)
				if err != nil {
					return nil, err
				}
				imports[path] = true
			}
		}
	}

	for path := range imports {
		for _, exception := range Exceptions {
			if Match(path, exception) {
				imports[path] = false
			}
		}
	}

	paths := []string{}
	for path, ok := range imports {
		if ok {
			paths = append(paths, path)
		}
	}

	paths = FilterStdPkgs(paths)

	projectpath, err := ImportPath(".")
	if err != nil {
		return nil, err
	}

	// filter out packages internal to the project
	paths = strutil.RemovePrefixInStringSlice(projectpath, paths)

	return paths, nil
}