Example #1
// readCsv streams records from the inputFn CSV file (or stdin when inputFn is
// empty) into ch, closing ch on EOF or on the first read error.
func readCsv(ch chan []string) {
	var reader *csv.Reader
	if inputFn == "" {
		reader = csv.NewReader(os.Stdin)
	} else {
		file, err := os.Open(inputFn)
		if err != nil {
			fmt.Println("Error:", err)
			os.Exit(1)
		}
		defer file.Close()
		reader = csv.NewReader(file)
	}
	// Allow a variable number of fields per record unless strict.
	if !strictLen {
		reader.FieldsPerRecord = -1
	}
	// Use the first rune of inputSep as the field delimiter.
	r, _ := utf8.DecodeRuneInString(inputSep)
	reader.Comma = r
	reader.LazyQuotes = lazyQuotes

	for {
		record, err := reader.Read()
		if err == io.EOF {
			close(ch)
			break
		} else if err != nil {
			fmt.Println("Error:", err)
			close(ch)
			break
		}
		ch <- record
	}
}
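For reference, a minimal self-contained sketch of the same pattern (a goroutine streaming CSV records into a channel that the caller ranges over) could look like the following; the readRecords name and the printing in main are illustrative assumptions, not part of the original project.

package main

import (
	"encoding/csv"
	"fmt"
	"io"
	"os"
)

// readRecords streams CSV records from r into ch and closes ch when done.
func readRecords(r io.Reader, ch chan<- []string) {
	reader := csv.NewReader(r)
	reader.FieldsPerRecord = -1 // allow rows of varying length
	for {
		record, err := reader.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			fmt.Fprintln(os.Stderr, "Error:", err)
			break
		}
		ch <- record
	}
	close(ch)
}

func main() {
	ch := make(chan []string)
	go readRecords(os.Stdin, ch)
	for record := range ch {
		fmt.Println(len(record), "fields:", record)
	}
}

Closing the channel on the producer side is what lets the consumer's range loop terminate cleanly.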
Example #2
// readSIF converts the SIF records read from reader into CX aspects and
// writes the resulting JSON document to w.
func (con Sif2Cx) readSIF(reader *csv.Reader, w *bufio.Writer) {
	// Default the network name when none is given.
	var netName string
	if con.Name == "" {
		netName = "CX from SIF file"
	} else {
		netName = con.Name
	}

	// Set delimiter and quoting behaviour.
	reader.Comma = con.Delimiter
	reader.LazyQuotes = true

	// Tracks nodes that have already been serialized, keyed by name.
	nodesExists := make(map[string]int64)

	nodeCounter := int64(0)

	w.Write([]byte("["))

	for {
		record, err := reader.Read()

		if err == io.EOF {
			// Add network attributes at the end of doc.
			netAttr := cx.NetworkAttribute{N: "name", V: netName}

			attrList := []cx.NetworkAttribute{netAttr}
			netAttrs := make(map[string][]cx.NetworkAttribute)

			netAttrs["networkAttributes"] = attrList

			json.NewEncoder(w).Encode(netAttrs)

			w.Write([]byte("]"))
			w.Flush()
			break
		}

		if err != nil {
			log.Fatal(err)
		}

		if len(record) == 3 {
			toJson(record, nodesExists, &nodeCounter, w)
		}

		w.Flush()
	}
}
Example #3
// NewReader opens the CSV file at path and returns a csv.Reader configured
// with the given delimiter, optionally skipping a leading UTF-8 BOM. The
// caller is responsible for closing the returned *os.File.
func NewReader(path string, comma rune, stripBom bool) (*csv.Reader, *os.File, error) {
	var csvReader *csv.Reader
	var file *os.File
	file, err := os.Open(path)
	if err != nil {
		return csvReader, file, err
	}
	if stripBom {
		// Skip the first three bytes, assumed to be a UTF-8 BOM.
		b3 := make([]byte, 3)
		_, err := file.Read(b3)
		if err != nil {
			return csvReader, file, err
		}
	}
	csvReader = csv.NewReader(file)
	csvReader.Comma = comma
	return csvReader, file, nil
}
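A hedged usage sketch of the function above, assuming it lives in the caller's package; "data.csv" and the ';' delimiter are placeholders. Returning the *os.File alongside the reader is what lets the caller defer the Close.

package main

import (
	"fmt"
	"log"
)

func main() {
	// "data.csv" and ';' are placeholder arguments. The *os.File is returned
	// alongside the reader precisely so the caller controls its lifetime.
	reader, file, err := NewReader("data.csv", ';', true)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()

	records, err := reader.ReadAll()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("rows:", len(records))
}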
Example #4
File: diff.go Project: yukimemi/gfi
// executeDiff loads the FileInfos stored in two or more gfi CSV files, diffs
// their time, size and mode fields, and writes the result to a tab-separated
// CSV file.
func executeDiff(cmd *cobra.Command, args []string) {

	var (
		err error

		match  *regexp.Regexp
		ignore *regexp.Regexp

		csvMap  = make(map[string][]string)
		fisList = make([]FileInfos, 0)
		q       = make(chan info)
		wg      = new(sync.WaitGroup)
	)

	if len(args) == 0 {
		cmd.Help()
		return
	}

	// Get glob file args.
	args, err = core.GetGlobArgs(args)
	if err != nil {
		log.Fatalln(err)
	}

	// Recheck args.
	if len(args) <= 1 {
		cmd.Help()
		return
	}

	// Load csv and store.
	for _, csvPath := range args {
		fmt.Println("Open:", csvPath)
		c, err := os.Open(csvPath)
		if err != nil {
			log.Fatalln(err)
		}
		defer c.Close()
		var reader *csv.Reader
		if sjisIn {
			reader = csv.NewReader(transform.NewReader(c, japanese.ShiftJIS.NewDecoder()))
		} else {
			reader = csv.NewReader(c)
		}
		reader.Comma = '\t'
		// Skip header.
		_, err = reader.Read()
		if err != nil {
			log.Fatalln(err)
		}
		left, err := reader.ReadAll()
		if err != nil {
			log.Fatalln(err)
		}

		// Change data to FileInfos struct.
		fis := make(FileInfos, 0)
		for _, r := range left {
			fis = append(fis, *csvToFileInfo(r))
		}
		fisList = append(fisList, fis)
	}

	// Compile the match and ignore patterns when provided.
	if len(matches) != 0 {
		match, err = core.CompileStrs(matches)
		if err != nil {
			log.Fatalln(err)
		}
	}
	if len(ignores) != 0 {
		ignore, err = core.CompileStrs(ignores)
		if err != nil {
			log.Fatalln(err)
		}
	}

	for i, one := range fisList {
		wg.Add(1)
		go func(i int, one FileInfos) {
			defer wg.Done()

			// Diff fileinfo.
			for _, oneFi := range one {
				if fileOnly && oneFi.Type == DIR {
					continue
				}
				if dirOnly && oneFi.Type == FILE {
					continue
				}

				// Ignore check.
				if ignore != nil && ignore.MatchString(oneFi.Full) {
					continue
				}

				// Match check.
				if match != nil && !match.MatchString(oneFi.Full) {
					continue
				}

				for j, other := range fisList {
					if i == j {
						continue
					}

					// Look up the entry with the same full path in the other list.
					otherFi, err := findFileInfo(other, oneFi)
					if err == nil {
						// Diff Time.
						if oneFi.Time != otherFi.Time {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileTime,
								value: oneFi.Time,
								ford:  oneFi.Type,
							}
						}
						// Diff Size.
						if oneFi.Size != otherFi.Size {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileSize,
								value: oneFi.Size,
								ford:  oneFi.Type,
							}
						}
						// Diff Mode.
						if oneFi.Mode != otherFi.Mode {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileMode,
								value: oneFi.Mode,
								ford:  oneFi.Type,
							}
						}
					} else {
						q <- info{
							path:  args[i],
							index: i,
							full:  oneFi.Full,
							diff:  FileFull,
							value: oneFi.Full,
							ford:  oneFi.Type,
						}
					}
				}
			}
		}(i, one)
	}

	// Async wait.
	go func() {
		wg.Wait()
		close(q)
	}()

	// Receive diff and store to array.
	for info := range q {
		cnt++
		if !silent {
			fmt.Fprintf(os.Stderr, "Count: %d\r", cnt)
		}
		key := info.full + fmt.Sprint(info.diff)
		if _, ok := csvMap[key]; ok {
			csvMap[key][info.index+3] = info.value
		} else {
			s := make([]string, len(args)+3)
			s[0] = info.full
			s[1] = info.ford
			s[2] = fmt.Sprint(info.diff)
			s[info.index+3] = info.value
			csvMap[key] = s
		}
	}

	if len(csvMap) == 0 {
		fmt.Println("There is no difference !")
		return
	}

	// Output to csv.
	if err := os.MkdirAll(filepath.Dir(out), os.ModePerm); err != nil {
		log.Fatalln(err)
	}
	c, err := os.Create(out)
	if err != nil {
		log.Fatalln(err)
	}
	defer c.Close()
	var writer *csv.Writer
	if sjisOut {
		writer = csv.NewWriter(transform.NewWriter(c, japanese.ShiftJIS.NewEncoder()))
	} else {
		writer = csv.NewWriter(c)
	}
	writer.Comma = '\t'
	writer.UseCRLF = true

	// Write header.
	err = writer.Write(append(strings.Split(DiffHeader, "\t"), args...))
	if err != nil {
		log.Fatalln(err)
	}

	// map to array.
	var csvArray records
	for _, v := range csvMap {
		csvArray = append(csvArray, v)
	}

	// Sort (the sort key defaults to "0,2" when not specified).
	if sorts == "" {
		sorts = "0,2"
	}
	sort.Sort(csvArray)

	for _, v := range csvArray {
		err = writer.Write(v)
		if err != nil {
			log.Fatalln(err)
		}
	}
	writer.Flush()
	fmt.Printf("Write to [%s]. ([%d] row)\n", out, cnt)
}
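The records type and its ordering are not shown in this excerpt. Under the assumption that it is a [][]string alias sorted by a list of column indices (mirroring the "0,2" default above), a sort.Interface implementation could be sketched like this; it is not the project's actual code.

package main

import (
	"fmt"
	"sort"
)

// records is a sortable slice of CSV rows; sortCols lists the column indices
// compared in order. Both names are assumptions for illustration only.
type records [][]string

var sortCols = []int{0, 2}

func (r records) Len() int      { return len(r) }
func (r records) Swap(i, j int) { r[i], r[j] = r[j], r[i] }
func (r records) Less(i, j int) bool {
	for _, c := range sortCols {
		if r[i][c] != r[j][c] {
			return r[i][c] < r[j][c]
		}
	}
	return false
}

func main() {
	rows := records{
		{"b.txt", "FILE", "1"},
		{"a.txt", "FILE", "2"},
		{"a.txt", "FILE", "0"},
	}
	sort.Sort(rows)
	fmt.Println(rows)
}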
Example #5
// ImportDictionaries loads every "*.txt" or "*.txt.gz" file found in
// DataDirectory as a '|'-separated term|weight dictionary and returns the
// parsed items keyed by dictionary name.
func ImportDictionaries() map[string][]*models.SuggestItem {
	var itemMap = make(map[string][]*models.SuggestItem)

	fileInfo, err := ioutil.ReadDir(DataDirectory)
	if err != nil {
		log.Print(err)
		return itemMap
	}
	numberOfDictionaries := 0
	for _, file := range fileInfo {
		if !file.IsDir() && (strings.HasSuffix(file.Name(), ".txt") || strings.HasSuffix(file.Name(), ".txt.gz")) {
			dictionaryFile := fmt.Sprintf("%s%s%s", DataDirectory, string(os.PathSeparator), file.Name())
			dictionaryName := strings.TrimSuffix(strings.TrimSuffix(file.Name(), ".gz"), ".txt")
			log.Printf("Importing dictionary %s from file %s", dictionaryName, dictionaryFile)

			csvFile, err := os.Open(dictionaryFile)
			if err != nil {
				log.Print(err)
				continue
			}
			defer csvFile.Close()
			var csvReader *csv.Reader
			if strings.HasSuffix(file.Name(), ".txt.gz") {
				gzipReader, gzerr := gzip.NewReader(csvFile)
				if gzerr == nil {
					defer gzipReader.Close()
					csvReader = csv.NewReader(gzipReader)
				} else {
					log.Print(gzerr)
					continue
				}
			} else {
				csvReader = csv.NewReader(csvFile)
			}

			csvReader.FieldsPerRecord = 2
			csvReader.Comma = '|'
			csvReader.LazyQuotes = true
			csvReader.TrimLeadingSpace = true

			rawCSVdata, err := csvReader.ReadAll()
			if err != nil {
				log.Print(err)
				continue
			}

			for _, each := range rawCSVdata {
				var suggestItem = new(models.SuggestItem)
				suggestItem.Term = each[0]
				weight, err := strconv.Atoi(each[1])
				if err == nil {
					suggestItem.Weight = weight
					itemMap[dictionaryName] = append(itemMap[dictionaryName], suggestItem)
				}

			}
			numberOfDictionaries++
		}
	}

	log.Printf("Imported %d dictionaries", numberOfDictionaries)
	return itemMap
}
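A compact, hedged sketch of the same open-plain-or-gzipped-CSV technique, restructured so the caller owns the file handle instead of deferring Close inside the loop; openDictCSV and the example path are illustrative names, not part of the original project.

package main

import (
	"compress/gzip"
	"encoding/csv"
	"fmt"
	"io"
	"log"
	"os"
	"strings"
)

// openDictCSV opens path and returns a '|'-separated, two-column csv.Reader,
// transparently unwrapping a gzip layer for ".gz" files. Closing the returned
// *os.File releases the underlying handle in both cases.
func openDictCSV(path string) (*csv.Reader, *os.File, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, nil, err
	}
	var r io.Reader = f
	if strings.HasSuffix(path, ".gz") {
		gz, err := gzip.NewReader(f)
		if err != nil {
			f.Close()
			return nil, nil, err
		}
		r = gz
	}
	cr := csv.NewReader(r)
	cr.Comma = '|'
	cr.FieldsPerRecord = 2
	cr.LazyQuotes = true
	cr.TrimLeadingSpace = true
	return cr, f, nil
}

func main() {
	cr, f, err := openDictCSV("dictionary.txt.gz") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	rows, err := cr.ReadAll()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("entries:", len(rows))
}

Returning the file to the caller keeps only one handle open at a time, whereas a defer inside a loop (as in the example above) holds every file open until the surrounding function returns.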
Example #6
// Loads a CSV file from http://download.gisgraphy.com/openstreetmap/csv/cities/
// The expected CSV columns are:
//   0 :  Node type;   N|W|R (in uppercase), whether it is a Node, Way or Relation in the openstreetmap Model
//   1 :  id;  The openstreetmap id
//   2 :  name;    the default name of the city
//   3 :  countrycode; The iso3166-2 country code (2 letters)
//   4 :  postcode;    The postcode / zipcode / ons code / municipality code / ...
//   5 :  population;  How many people live in that city
//   6 :  location;    The middle location of the city in HEXEWKB
//   7 :  shape; The delimitation of the city in HEXEWKB
//   8 :  type; the type of city ('city', 'village', 'town', 'hamlet', ...)
//   9 :  is_in ; where the city is located (generally the fully qualified administrative division)
//   10 : alternatenames;     the names of the city in other languages
func load_gisgraphy_cities_csv(rt *rtreego.Rtree, fname string) (int, error) {
	log.Println("Loading", fname, "...")
	f, err := os.Open(fname)
	if err != nil {
		return 0, err
	}
	defer f.Close()

	df, err := Decompressor(f)
	if err != nil {
		log.Fatal(err)
	}

	var r *csv.Reader
	if strings.Contains(fname, ".tar.") {
		// Handles tar archives as downloaded from gisgraphy.com: scan for the CSV member.
		tf := tar.NewReader(df)
		for {
			hdr, err := tf.Next()
			if err == io.EOF {
				log.Fatal("Couldn't find CSV file in " + fname)
			} else if err != nil {
				log.Fatal(err)
			}
			if strings.HasSuffix(hdr.Name, ".txt") {
				r = csv.NewReader(tf)
				break
			}
		}
	} else {
		// Handles files that have already been extracted to a plain CSV.
		r = csv.NewReader(df)
	}
	r.Comma = '\t'
	line := 0
	loaded_objects := 0
	for {
		cols, err := r.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			return loaded_objects, err
		}
		line++
		if len(cols[7]) == 0 {
			continue
		}

		osm_id, err := strconv.ParseInt(cols[1], 10, 64)
		if err != nil {
			log.Fatal("Error parsing", cols, line, err)
		}

		geom, err := geos.FromHex(cols[7])
		if err != nil {
			log.Fatal("Error parsing", cols, line, err)
		}
		rect, err := RtreeBboxg(geom, 1e-5)
		if err != nil {
			log.Fatal("Error getting bbox", cols, line, err)
		}
		obj := GeoObj{rect,
			&GeoData{
				Id:            osm_id,
				City:          cols[2],
				CountryCode_2: cols[3],
				Type:          "city",
				Geom:          geom}}
		rt.Insert(&obj)
		loaded_objects++
	}
	return loaded_objects, nil
}
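For orientation, the column layout documented at the top of this example could be mapped onto a struct along these lines; the gisgraphyCity type, its field names and parseCity are assumptions for illustration only.

package main

import (
	"fmt"
	"strconv"
)

// gisgraphyCity mirrors the documented column order of the cities CSV.
// The struct and field names are illustrative assumptions.
type gisgraphyCity struct {
	NodeType       string // 0: N|W|R
	ID             int64  // 1: openstreetmap id
	Name           string // 2: default name
	CountryCode    string // 3: iso3166-2 country code
	Postcode       string // 4: postcode / zipcode / ...
	Population     string // 5: population count (kept as text here)
	Location       string // 6: middle location in HEXEWKB
	Shape          string // 7: city delimitation in HEXEWKB
	Type           string // 8: 'city', 'village', 'town', ...
	IsIn           string // 9: enclosing administrative division
	AlternateNames string // 10: names in other languages
}

func parseCity(cols []string) (gisgraphyCity, error) {
	if len(cols) < 11 {
		return gisgraphyCity{}, fmt.Errorf("expected 11 columns, got %d", len(cols))
	}
	id, err := strconv.ParseInt(cols[1], 10, 64)
	if err != nil {
		return gisgraphyCity{}, err
	}
	return gisgraphyCity{
		NodeType: cols[0], ID: id, Name: cols[2], CountryCode: cols[3],
		Postcode: cols[4], Population: cols[5], Location: cols[6],
		Shape: cols[7], Type: cols[8], IsIn: cols[9], AlternateNames: cols[10],
	}, nil
}

func main() {
	cols := []string{"N", "42", "Springfield", "US", "", "30000", "", "0103...", "city", "", ""}
	c, err := parseCity(cols)
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Printf("%s (%s) id=%d\n", c.Name, c.Type, c.ID)
}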