Example #1
// NewWithCsvWriter writes all of the Rows in a Table to a CSV file using the options in the CSV writer.
// It assumes that all Rows have the same headers. Columns are written in alphabetical order.
func NewWithCsvWriter(writer *csv.Writer) optimus.Sink {
	return func(source optimus.Table) error {
		defer source.Stop()
		headers := []string{}
		wroteHeader := false
		for row := range source.Rows() {
			if !wroteHeader {
				headers = convertRowToHeader(row)
				sort.Strings(headers)
				if err := writer.Write(headers); err != nil {
					return err
				}
				wroteHeader = true
			}
			if err := writer.Write(convertRowToRecord(row, headers)); err != nil {
				return err
			}
		}
		if source.Err() != nil {
			return source.Err()
		}
		writer.Flush()
		if writer.Error() != nil {
			return writer.Error()
		}
		return nil
	}
}
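The helpers convertRowToHeader and convertRowToRecord are not shown in the snippet above; a minimal sketch of what they could look like, assuming optimus.Row is a map from column name to value (as in Clever/optimus) and that values are stringified with fmt:

func convertRowToHeader(row optimus.Row) []string {
	headers := []string{}
	for key := range row {
		headers = append(headers, key)
	}
	return headers
}

func convertRowToRecord(row optimus.Row, headers []string) []string {
	record := make([]string, 0, len(headers))
	for _, header := range headers {
		record = append(record, fmt.Sprintf("%v", row[header]))
	}
	return record
}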
Example #2
func writeSalesData(conn *sql.DB, writer *csv.Writer, first int, last int, wg *sync.WaitGroup) {
	defer wg.Done()

	rows, err := conn.Query("select * from sales where id between ? and ?", first, last)
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	defer rows.Close()

	var orderId int
	var userId int
	var orderAmount float64

	for rows.Next() {
		rows.Scan(&orderId, &userId, &orderAmount)

		writer.Write([]string{
			strconv.Itoa(orderId),
			strconv.Itoa(userId),
			strconv.FormatFloat(orderAmount, 'f', 6, 64),
		},
		)
	}

	writer.Flush()
}
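writeSalesData takes a *sync.WaitGroup, so it is meant to be launched as a goroutine. A hedged sketch of a caller; since csv.Writer is not safe for concurrent use, each goroutine gets its own writer and output file (the file names and ID ranges are hypothetical):

var wg sync.WaitGroup
ranges := []struct{ first, last int }{{1, 1000}, {1001, 2000}}
for i, r := range ranges {
	f, err := os.Create(fmt.Sprintf("sales_%d.csv", i)) // hypothetical output files
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close() // fine for a short-lived program
	w := csv.NewWriter(f)
	wg.Add(1)
	go writeSalesData(conn, w, r.first, r.last, &wg) // conn is an open *sql.DB from elsewhere
}
wg.Wait()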
Example #3
func writeOneRecordAtaTime(writer *csv.Writer, data [][]string) {

	for _, row := range data {
		writer.Write(row)
	}
	writer.Flush()
}
Example #4
func writeTo(writer *csv.Writer, in interface{}) error {
	inValue, inType := getConcreteReflectValueAndType(in) // Get the concrete type (not pointer) (Slice<?> or Array<?>)
	if err := ensureInType(inType); err != nil {
		return err
	}
	inInnerWasPointer, inInnerType := getConcreteContainerInnerType(inType) // Get the concrete inner type (not pointer) (Container<"?">)
	if err := ensureInInnerType(inInnerType); err != nil {
		return err
	}
	inInnerStructInfo := getStructInfo(inInnerType) // Get the inner struct info to get CSV annotations
	csvHeadersLabels := make([]string, len(inInnerStructInfo.Fields))
	for i, fieldInfo := range inInnerStructInfo.Fields { // Used to write the header (first line) in CSV
		csvHeadersLabels[i] = fieldInfo.getFirstKey()
	}
	if err := writer.Write(csvHeadersLabels); err != nil {
		return err
	}
	inLen := inValue.Len()
	for i := 0; i < inLen; i++ { // Iterate over container rows
		for j, fieldInfo := range inInnerStructInfo.Fields {
			csvHeadersLabels[j] = ""
			inInnerFieldValue, err := getInnerField(inValue.Index(i), inInnerWasPointer, fieldInfo.IndexChain) // Get the correct field header <-> position
			if err != nil {
				return err
			}
			csvHeadersLabels[j] = inInnerFieldValue
		}
		if err := writer.Write(csvHeadersLabels); err != nil {
			return err
		}
	}
	writer.Flush()
	return writer.Error()
}
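writeTo is a reflection-based marshaller in the gocsv style: the header labels come from struct field annotations read by getStructInfo. A minimal sketch of the kind of input it expects, assuming the annotation is a `csv:"..."` struct tag:

type Person struct {
	Name string `csv:"name"`
	Age  int    `csv:"age"`
}

// writeTo(csv.NewWriter(os.Stdout), []Person{{"Ada", 36}, {"Grace", 41}})
// would then emit:
//   name,age
//   Ada,36
//   Grace,41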
Example #5
func writeCsv(ch chan []string) {
	var writer *csv.Writer
	if outputFn == "" {
		writer = csv.NewWriter(os.Stdout)
	} else {
		file, err := os.Create(outputFn)
		if err != nil {
			fmt.Println("Error:", err)
			os.Exit(1)
		}
		defer file.Close()
		writer = csv.NewWriter(file)
	}

	r, _ := utf8.DecodeRuneInString(outputSep)
	writer.Comma = r

	for row := range ch {
		err := writer.Write(row)
		if err != nil {
			fmt.Println("Error:", err)
			close(ch)
			return
		}
	}
	writer.Flush()
}
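writeCsv consumes rows from a channel; a hedged sketch of the producer side is below. Note that the consumer above closes ch on a write error, which can panic a producer that is still sending; normally only the producer closes the channel, and a real program would also wait for writeCsv to finish (e.g. with a sync.WaitGroup) before exiting:

ch := make(chan []string, 64)
go writeCsv(ch)
for _, row := range rows { // rows is assumed to be a [][]string built elsewhere
	ch <- row
}
close(ch) // lets the range in writeCsv end, so it flushes and returns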
Example #6
func (p *csvProcessor) flush(writer *csv.Writer, err error) error {
	if err != nil {
		return err
	}
	writer.Flush()
	return writer.Error()
}
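The err parameter lets a caller funnel an earlier write error into the flush in a single return statement. A hypothetical call site (writeAll and its loop are not from the original):

func (p *csvProcessor) writeAll(w *csv.Writer, rows [][]string) error {
	var err error
	for _, row := range rows {
		if err = w.Write(row); err != nil {
			break
		}
	}
	// Returns the write error if there was one, otherwise flushes and returns w.Error().
	return p.flush(w, err)
}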
Example #7
func NewAppend(path string) *kvAppend {
	kv := &kvAppend{
		kvMem: NewMemory(),
		queue: make(chan *record),
	}

	var lastErr error

	f, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, 0644)
	if err != nil {
		lastErr = err
	} else {
		r := csv.NewReader(f)
		for {
			rec, err := r.Read()
			if err != nil {
				if err != io.EOF {
					lastErr = err
				}
				break
			}
			kv.kvMem.m[rec[0]] = []byte(rec[1])
		}
	}

	var w *csv.Writer
	if lastErr == nil {
		w = csv.NewWriter(f)
	}

	go func() {
		for r := range kv.queue {
			if lastErr == nil {
				var rec []string
				if r.Value != nil {
					rec = []string{r.Key, string(r.Value)}
				} else {
					rec = []string{r.Key}
				}
				if err = w.Write(rec); err != nil {
					lastErr = err
				}
				if r != nil && r.Err != nil {
					w.Flush()
					f.Sync()
					r.Err <- nil
					close(r.Err)
				}
			} else if r.Err != nil {
				r.Err <- lastErr
			}
		}
		if f != nil {
			f.Close()
		}
	}()

	return kv
}
Example #8
func processResults(config *Config, resultChan <-chan *JobResult) map[string]*JobStats {
	var resultFile *csv.Writer
	var allTestStats = make(map[string]*JobStats)
	var recentTestStats = make(map[string]*JobStats)

	if len(*queryStatsFile) > 0 {
		if file, err := os.Create(*queryStatsFile); err != nil {
			log.Fatalf("Could not open result file %s: %v",
				*queryStatsFile, err)
		} else {
			defer file.Close()

			resultFile = csv.NewWriter(file)
			defer resultFile.Flush()
		}
	}

	ticker := time.NewTicker(*updateInterval)
	if !*intermediateUpdates {
		ticker.Stop()
	}
	defer ticker.Stop()

	for {
		select {
		case jr, ok := <-resultChan:
			if !ok {
				return allTestStats
			}
			if resultFile != nil {
				resultFile.Write([]string{
					jr.Name,
					strconv.FormatInt(jr.Start.Nanoseconds()/1000, 10),
					strconv.FormatInt(jr.Elapsed.Nanoseconds()/1000, 10),
					strconv.FormatInt(jr.RowsAffected, 10),
				})
			}
			if _, ok := allTestStats[jr.Name]; !ok {
				allTestStats[jr.Name] = new(JobStats)
			}
			if _, ok := recentTestStats[jr.Name]; !ok {
				recentTestStats[jr.Name] = new(JobStats)
			}

			allTestStats[jr.Name].Update(jr)
			recentTestStats[jr.Name].Update(jr)

		case <-ticker.C:
			for name, stats := range recentTestStats {
				log.Printf("%s: %v", name, stats)
			}
			recentTestStats = make(map[string]*JobStats)
		}
	}
}
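A hedged sketch of how processResults might be driven; the worker function and its count are hypothetical, but the shape (workers send *JobResult values, the channel is closed once they are all done) follows from the code above:

resultChan := make(chan *JobResult, 128)
var wg sync.WaitGroup
for i := 0; i < numWorkers; i++ { // numWorkers is hypothetical
	wg.Add(1)
	go func() {
		defer wg.Done()
		runJobs(config, resultChan) // hypothetical worker that sends *JobResult values
	}()
}
go func() {
	wg.Wait()
	close(resultChan) // closing the channel is what makes processResults return
}()
allStats := processResults(config, resultChan)
_ = allStats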
Example #9
func webQuitHandler(diskwriter *csv.Writer, gzipwriter *gzip.Writer, csvfile *os.File) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "flushing to disk and shutting down")
		diskwriter.Flush()
		if gzipwriter != nil {
			gzipwriter.Flush()
			gzipwriter.Close()
		}
		csvfile.Close()
		os.Exit(0)
	}
}
Example #10
func (c Tweet) csvWriter(writer *csv.Writer, m chan Tweet) {
	var mutex = &sync.Mutex{}
	for i := range m {
		c = i
		//fmt.Println(c)
		data := []string{c.User, c.Post_date, c.Message}
		mutex.Lock()
		writer.Write(data)
		writer.Flush()
		mutex.Unlock()
	}
}
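One caveat about the method above (and its exported CsvWriter twin below): the mutex is created inside the method, so each call gets its own lock, and two goroutines running csvWriter against the same *csv.Writer are not actually serialized. A sketch of one way to share the lock, using a package-level mutex (csvMu and the method name are introduced here):

var csvMu sync.Mutex // shared by every goroutine that writes to the csv.Writer

func (c Tweet) csvWriterShared(writer *csv.Writer, m chan Tweet) {
	for i := range m {
		c = i
		data := []string{c.User, c.Post_date, c.Message}
		csvMu.Lock()
		writer.Write(data)
		writer.Flush()
		csvMu.Unlock()
	}
}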
Example #11
File: qhttp.go Project: stone/qhttp
// writeCsvLine takes a *result struct and writes one line to the *csv.Writer.
func writeCsvLine(w *csv.Writer, res *result) {
	headers_joined := strings.Join(res.headers, ",")
	// When saving to CSV, the duration is always in seconds.
	duration_seconds := fmt.Sprintf("%v", res.time.Seconds())
	// The csv package needs a slice of strings.
	record := []string{res.url, res.httpStatus, headers_joined, duration_seconds}
	err := w.Write(record)
	if err != nil {
		fmt.Println("Problem writing to csv file:", err)
	}
	w.Flush()
}
Example #12
func writeSif(nodes map[int64]string, edges []cx.Edge, w csv.Writer) {
	for i := range edges {

		edge := edges[i]

		if edge.I == "" {
			w.Write([]string{nodes[edge.S], "i", nodes[edge.T]})
		} else {
			w.Write([]string{nodes[edge.S], edge.I, nodes[edge.T]})
		}
	}
	w.Flush()
}
Example #13
func (c Tweet) CsvWriter(writer *csv.Writer, m chan Tweet) {
	var mutex = &sync.Mutex{}
	for i := range m {
		c = i
		//fmt.Println(c)
		data := []string{c.User, c.Post_date, c.Message}
		//Introduced locks for write to csv file
		mutex.Lock()
		writer.Write(data)
		writer.Flush()
		mutex.Unlock()
		//lock closed
	}
}
Example #14
func (t TeamStats) Summary(w *csv.Writer) {
	err := w.Write([]string{
		fmt.Sprintf("%v", t.Scored),
		fmt.Sprintf("%v", t.OppScore),
		fmt.Sprintf("%v", t.Possesions),
		fmt.Sprintf("%.2f", 100*float64(t.Scored)/float64(t.Possesions)),
		fmt.Sprintf("%v", t.Ds),
		fmt.Sprintf("%v", t.OpponentTurns),
		fmt.Sprintf("%.2f", 100*float64(t.Ds)/float64(t.OpponentTurns)),
	})
	if err != nil {
		log.Fatal(err)
	}
	w.Flush()
}
Example #15
func TeamHeader(w *csv.Writer) {
	err := w.Write([]string{
		"Scored",
		"Opponent Scored",
		"Possesions",
		"Efficiency",
		"D's",
		"Opponent Turns",
		"Percent D's",
	})
	if err != nil {
		log.Fatal(err)
	}
	w.Flush()
}
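TeamHeader and the Summary method from Example #14 are meant to be used together; a minimal usage sketch (teamStats is assumed to be a populated TeamStats value):

w := csv.NewWriter(os.Stdout)
TeamHeader(w)        // header row; flushes internally
teamStats.Summary(w) // stats row; also flushes internally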
Example #16
func json2csv(r LineReader, w *csv.Writer, keys []string) {
	var line []byte
	var err error
	for {
		if err == io.EOF {
			return
		}
		line, err = r.ReadBytes('\n')
		if err != nil {
			if err != io.EOF {
				log.Printf("Input ERROR: %s", err)
				break
			}
		}
		if len(line) == 0 {
			continue
		}

		var data map[string]interface{}
		err = json.Unmarshal(line, &data)
		if err != nil {
			log.Printf("ERROR Json Decoding: %s - %v", err, line)
			continue
		}
		var record []string
		for _, key := range keys {
			if v, ok := data[key]; ok {
				switch v.(type) {
				case nil:
					record = append(record, "")
				case float64:
					f, _ := v.(float64)
					if math.Mod(f, 1.0) == 0.0 {
						record = append(record, fmt.Sprintf("%d", int(f)))
					} else {
						record = append(record, fmt.Sprintf("%f", f))
					}
				default:
					record = append(record, fmt.Sprintf("%+v", v))
				}
			} else {
				record = append(record, "")
			}
		}
		w.Write(record)
		w.Flush()
	}
}
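A hedged driver for json2csv, assuming LineReader is an interface satisfied by *bufio.Reader (the only method used above is ReadBytes):

r := bufio.NewReader(os.Stdin) // one JSON object per line
w := csv.NewWriter(os.Stdout)
json2csv(r, w, []string{"id", "name", "price"}) // keys are hypothetical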
Example #17
func json2csv(r LineReader, w *csv.Writer, keys []string, printHeader bool) {
	var line []byte
	var err error
	line_count := 0

	var expanded_keys [][]string
	for _, key := range keys {
		expanded_keys = append(expanded_keys, strings.Split(key, "."))
	}

	for {
		if err == io.EOF {
			return
		}
		line, err = r.ReadBytes('\n')
		if err != nil {
			if err != io.EOF {
				log.Printf("Input ERROR: %s", err)
				break
			}
		}
		line_count++
		if len(line) == 0 {
			continue
		}

		if printHeader {
			w.Write(keys)
			w.Flush()
			printHeader = false
		}

		var data map[string]interface{}
		err = json.Unmarshal(line, &data)
		if err != nil {
			log.Printf("ERROR Decoding JSON at line %d: %s\n%s", line_count, err, line)
			continue
		}

		var record []string
		for _, expanded_key := range expanded_keys {
			record = append(record, get_value(data, expanded_key))
		}

		w.Write(record)
		w.Flush()
	}
}
Example #18
func saveDatasetAsCsv(dataset []Data, filePath string) {
	f, err := os.Create(filePath)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	var w *csv.Writer
	if runtime.GOOS == "windows" {
		// On Windows, encode as Shift-JIS so the csv file opens cleanly in Microsoft Excel.
		sjis := transform.NewWriter(f, japanese.ShiftJIS.NewEncoder())
		defer sjis.Close()
		converter := bufio.NewWriter(sjis)
		// Flush the buffered encoder as well; otherwise the tail of the file can be lost.
		defer converter.Flush()
		w = csv.NewWriter(converter)
	} else {
		w = csv.NewWriter(f)
	}
	defer w.Flush()

	// Write header first
	header := []string{
		// "日付",
		"証券会社名",
		"n225_sell",
		"n225_buy",
		"n225_net",
		"topix_sell",
		"topix_buy",
		"topix_net",
		"net_total",
	}
	w.Write(header)

	// Write dataset
	for _, data := range dataset {
		var record []string
		// record = append(record, obj.InfoDate)
		record = append(record, data.Company)
		record = append(record, data.N225Sell)
		record = append(record, data.N225Buy)
		record = append(record, data.N225Net)
		record = append(record, data.TopixSell)
		record = append(record, data.TopixBuy)
		record = append(record, data.TopixNet)
		record = append(record, data.NetTotal)
		w.Write(record)
	}
}
Example #19
func writeFromChan(writer *csv.Writer, c <-chan interface{}) error {
	// Get the first value. It will determine the header structure.
	firstValue := <-c
	inValue, inType := getConcreteReflectValueAndType(firstValue) // Get the concrete type
	if err := ensureStructOrPtr(inType); err != nil {
		return err
	}
	inInnerWasPointer := inType.Kind() == reflect.Ptr
	inInnerStructInfo := getStructInfo(inType) // Get the inner struct info to get CSV annotations
	csvHeadersLabels := make([]string, len(inInnerStructInfo.Fields))
	for i, fieldInfo := range inInnerStructInfo.Fields { // Used to write the header (first line) in CSV
		csvHeadersLabels[i] = fieldInfo.getFirstKey()
	}
	if err := writer.Write(csvHeadersLabels); err != nil {
		return err
	}
	write := func(val reflect.Value) error {
		for j, fieldInfo := range inInnerStructInfo.Fields {
			csvHeadersLabels[j] = ""
			inInnerFieldValue, err := getInnerField(val, inInnerWasPointer, fieldInfo.IndexChain) // Get the correct field header <-> position
			if err != nil {
				return err
			}
			csvHeadersLabels[j] = inInnerFieldValue
		}
		if err := writer.Write(csvHeadersLabels); err != nil {
			return err
		}
		return nil
	}
	if err := write(inValue); err != nil {
		return err
	}
	for v := range c {
		val, valType := getConcreteReflectValueAndType(v) // Get the concrete type of this element (struct or pointer to struct)
		if err := ensureStructOrPtr(valType); err != nil {
			return err
		}
		if err := write(val); err != nil {
			return err
		}
	}
	writer.Flush()
	return writer.Error()
}
Example #20
func writeUsersData(conn *sql.DB, writer *csv.Writer, first int, last int, wg *sync.WaitGroup) {
	defer wg.Done()

	rows, err := conn.Query("select * from users where id between ? and ?", first, last)
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	defer rows.Close()

	var id int
	var name string

	for rows.Next() {
		rows.Scan(&id, &name)
		writer.Write([]string{strconv.Itoa(id), name})
	}

	writer.Flush()

}
Example #21
// csvWriter specific method
func (cdre *CdrExporter) writeCsv(csvWriter *csv.Writer) error {
	csvWriter.Comma = cdre.fieldSeparator
	if len(cdre.header) != 0 {
		if err := csvWriter.Write(cdre.header); err != nil {
			return err
		}
	}
	for _, cdrContent := range cdre.content {
		if err := csvWriter.Write(cdrContent); err != nil {
			return err
		}
	}
	if len(cdre.trailer) != 0 {
		if err := csvWriter.Write(cdre.trailer); err != nil {
			return err
		}
	}
	csvWriter.Flush()
	return nil
}
Example #22
func IndHeader(w *csv.Writer) {
	err := w.Write([]string{
		"Name",
		"Goals",
		"Assists",
		"Catches",
		"Drops",
		"Catching Efficiency",
		"Throws",
		"Throwaways",
		"Throwing Efficiency",
		"Throws into Drops",
		"Efficiency incld drops",
		"D's",
		"Points Played",
	})
	if err != nil {
		log.Fatal(err)
	}
	w.Flush()
}
Example #23
func json2csv(r LineReader, w *csv.Writer, keys []string) {
	var line []byte
	var err error

	var expanded_keys [][]string
	for _, key := range keys {
		expanded_keys = append(expanded_keys, strings.Split(key, "."))
	}

	for {
		if err == io.EOF {
			return
		}
		line, err = r.ReadBytes('\n')
		if err != nil {
			if err != io.EOF {
				log.Printf("Input ERROR: %s", err)
				break
			}
		}
		if len(line) == 0 {
			continue
		}

		var data map[string]interface{}
		err = json.Unmarshal(line, &data)
		if err != nil {
			log.Printf("ERROR Json Decoding: %s - %v", err, line)
			continue
		}

		var record []string
		for _, expanded_key := range expanded_keys {
			record = append(record, get_value(data, expanded_key))
		}

		w.Write(record)
		w.Flush()
	}
}
Example #24
func (i IndStats) Summary(w *csv.Writer, n string) {
	err := w.Write([]string{
		n,
		fmt.Sprintf("%v", i.Goals),
		fmt.Sprintf("%v", i.Assists),
		fmt.Sprintf("%v", i.Catches),
		fmt.Sprintf("%v", i.Drops),
		fmt.Sprintf("%.2f", 100*float64(i.Catches)/float64(i.Catches+i.Drops)),
		fmt.Sprintf("%v", i.Throws),
		fmt.Sprintf("%v", i.Throwaways),
		fmt.Sprintf("%.2f", 100*float64(i.Throws-i.Throwaways)/float64(i.Throws)),
		fmt.Sprintf("%v", i.ThrowIntoDrop),
		fmt.Sprintf("%.2f",
			100*float64(i.Throws-i.ThrowIntoDrop-i.Throwaways)/float64(i.Throws)),
		fmt.Sprintf("%v", i.Ds),
		fmt.Sprintf("%v", i.PointsPlayed),
	})
	if err != nil {
		log.Fatal(err)
	}
	w.Flush()
}
Example #25
func main() {
	accessToken := flag.String("access-token", "", "Bitly OAuth Access Token - https://bitly.com/a/oauth_apps")
	endpoint := flag.String("api", "https://api-ssl.bitly.com", "Bitly API Endpoint")
	outputFile := flag.String("output-file", "", "output.csv (or blank for stdout)")

	flag.Parse()

	if *accessToken == "" {
		log.Fatalf("-access-token required")
	}

	fetcher := &Fetcher{
		accessToken: *accessToken,
		endpoint:    *endpoint,
	}

	var output *csv.Writer
	switch *outputFile {
	case "":
		output = csv.NewWriter(os.Stdout)
	default:
		f, err := os.Create(*outputFile)
		if err != nil {
			log.Fatalf("%s", err)
		}
		defer f.Close()
		output = csv.NewWriter(f)
	}
	defer output.Flush()
	output.Write([]string{"bitlink", "long_url", "title", "notes", "created", "created_ts"})
	for fetcher.Fetch() {
		for _, l := range fetcher.Bitlinks() {
			output.Write(l.CSV())
		}
	}
	if err := fetcher.Error(); err != nil {
		log.Printf("Error: %s", err)
	}
}
Example #26
// WriteRow writes a single record to the writer and flushes it.
func WriteRow(r []string, w csv.Writer) {
	w.Write(r)
	w.Flush()
}
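WriteRow above (and writeSif in Example #12) take csv.Writer by value rather than by pointer. That happens to work because csv.Writer keeps its buffered output behind an internal pointer, so the copy still flushes the same buffer, but the idiomatic signature passes *csv.Writer and surfaces errors; a sketch of that variant (the name writeRowChecked is introduced here):

func writeRowChecked(r []string, w *csv.Writer) error {
	if err := w.Write(r); err != nil {
		return err
	}
	w.Flush()
	return w.Error()
}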
Example #27
func writeAllAtOnce(writer *csv.Writer, data [][]string) {
	// WriteAll writes every record and flushes internally, so the extra Flush is
	// redundant (though harmless); WriteAll's returned error is also ignored here.
	writer.WriteAll(data)
	writer.Flush()
}
Example #28
func (c Tweet) csvWriter(writer *csv.Writer) {
	data := []string{c.User, c.Post_date, c.Message}
	writer.Write(data)
	Info.Println(data)
	writer.Flush()
}
Example #29
func (p *plug) daemon() {
	var diskwriter *csv.Writer
	var gzipwriter *gzip.Writer
	fmt.Println("starting foreground daemon ;-)")

	// write csv from disk into the buffer
	fmt.Println("loading history (" + p.csvfile + ")")
	p.buffer.Write(readcsv(p.csvfile))

	// create/append the csvfile on disk
	csvfile, err := os.OpenFile(p.csvfile, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0600)
	if err != nil {
		log.Fatal("Error:", err)
	}
	defer csvfile.Close()

	// create a bufferwriter (appends to csv already in p.buffer)
	bufferwriter := csv.NewWriter(&p.buffer)

	// compressed or not
	if strings.Contains(p.csvfile, ".gz") {
		gzipwriter, _ = gzip.NewWriterLevel(csvfile, gzip.BestCompression)
		defer gzipwriter.Close()
		// wrap csv around gzipwriter
		diskwriter = csv.NewWriter(gzipwriter)
	} else {
		// create a diskwriter (appends to csv on disk)
		diskwriter = csv.NewWriter(csvfile)
	}

	// connect via telnet to the device and login
	conn, err := p.DialTimeout("tcp", p.device, time.Duration(time.Second*30))
	if err != nil {
		log.Fatal("can't connect")
	}

	// create http handlers
	http.HandleFunc("/quit", webQuitHandler(diskwriter, gzipwriter, csvfile))
	http.HandleFunc("/history", webHistoryHandler)
	http.HandleFunc("/stream", webStreamHandler)
	http.HandleFunc("/read.csv", webReadCsvHandler(p))
	http.HandleFunc("/read.json", webReadJsonHandler(p))

	// record counter, used to flush to disk periodically
	recordcount := 0

	// start infinite polling loop
	for {
		// measure how long it takes
		start := time.Now()

		// specify correct format for dygraph
		record := []string{start.Format("2006/01/02 15:04:05")}

		// get statistics from device and cleanup
		status := sendln(conn, plugGetInfoStats, '#')
		status = strings.Replace(status, plugGetInfoStats+"\r\n", "", 1)
		status = strings.Replace(status, "#", "", 1)
		// split up the 4 results on newlines
		results := strings.SplitN(status, "\r\n", 4)

		re := regexp.MustCompile("01(I|V|W|E)[0-9]+ 0*([0-9]+)")
		// for each GetInfo result, do a regexp match, adjust value and create a CSV record
		for i, result := range results {
			match := re.FindStringSubmatch(result)
			value := "0"
			// check if we got the right size of slice
			if len(match) == 3 {
				value = match[2]
			}

			temp, _ := strconv.ParseFloat(value, 32)

			switch i {
			case 0:
				// mAmp/10 -> Amp
				value = strconv.FormatFloat(temp/10000, 'f', 2, 32)
			// centiWatt -> Watt
			case 1:
				value = strconv.FormatFloat(temp/100, 'f', 2, 32)
			// mWatt/h -> Watt/h | mVolt -> Volt
			case 2, 3:
				value = strconv.FormatFloat(temp/1000, 'f', 2, 32)
			}
			record = append(record, value)
			recordcount += 1
		}

		// latestentry is needed in JSON for the realtime streaming
		p.latestEntry, _ = json.Marshal(record)

		// write the record to disk
		err := diskwriter.Write(record)
		if err != nil {
			fmt.Println("Error:", err)
		}

		// write the record to buffer (in memory)
		err = bufferwriter.Write(record)
		if err != nil {
			fmt.Println("Error:", err)
		}

		// flush to disk every 100 records
		if recordcount%100 == 0 {
			diskwriter.Flush()
			if strings.Contains(p.csvfile, ".gz") {
				gzipwriter.Flush()
			}
		}
		// flush memory immediately
		bufferwriter.Flush()

		if debug {
			fmt.Print(record)
			fmt.Println(" took", time.Since(start))
		}
		// sleep the right amount of time
		time.Sleep(time.Second*time.Duration(p.delay) - time.Since(start))
	}
}
Example #30
File: diff.go Project: yukimemi/gfi
func executeDiff(cmd *cobra.Command, args []string) {

	var (
		err error

		match  *regexp.Regexp
		ignore *regexp.Regexp

		csvMap  = make(map[string][]string)
		fisList = make([]FileInfos, 0)
		q       = make(chan info)
		wg      = new(sync.WaitGroup)
	)

	if len(args) == 0 {
		cmd.Help()
		return
	}

	// Get glob file args.
	args, err = core.GetGlobArgs(args)
	if err != nil {
		log.Fatalln(err)
	}

	// Recheck args.
	if len(args) <= 1 {
		cmd.Help()
		return
	}

	// Load csv and store.
	for _, csvPath := range args {
		fmt.Println("Open:", csvPath)
		c, err := os.Open(csvPath)
		if err != nil {
			log.Fatalln(err)
		}
		defer c.Close() // note: these deferred closes accumulate until executeDiff returns
		var reader *csv.Reader
		if sjisIn {
			reader = csv.NewReader(transform.NewReader(c, japanese.ShiftJIS.NewDecoder()))
		} else {
			reader = csv.NewReader(c)
		}
		reader.Comma = '\t'
		// Skip header.
		_, err = reader.Read()
		if err != nil {
			log.Fatalln(err)
		}
		left, err := reader.ReadAll()
		if err != nil {
			log.Fatalln(err)
		}

		// Change data to FileInfos struct.
		fis := make(FileInfos, 0)
		for _, r := range left {
			fis = append(fis, *csvToFileInfo(r))
		}
		fisList = append(fisList, fis)
	}

	// Compile if given matches and ignores.
	if len(matches) != 0 {
		match, err = core.CompileStrs(matches)
		if err != nil {
			log.Fatalln(err)
		}
	}
	if len(ignores) != 0 {
		ignore, err = core.CompileStrs(ignores)
		if err != nil {
			log.Fatalln(err)
		}
	}

	for i, one := range fisList {
		wg.Add(1)
		go func(i int, one FileInfos) {
			defer wg.Done()

			// Diff fileinfo.
			for _, oneFi := range one {
				if fileOnly && oneFi.Type == DIR {
					continue
				}
				if dirOnly && oneFi.Type == FILE {
					continue
				}

				// Ignore check.
				if ignore != nil && ignore.MatchString(oneFi.Full) {
					continue
				}

				// Match check.
				if match != nil && !match.MatchString(oneFi.Full) {
					continue
				}

				for j, other := range fisList {
					if i == j {
						continue
					}

					// Get other's same full path info.
					otherFi, err := findFileInfo(other, oneFi)
					if err == nil {
						// Diff Time.
						if oneFi.Time != otherFi.Time {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileTime,
								value: oneFi.Time,
								ford:  oneFi.Type,
							}
						}
						// Diff Size.
						if oneFi.Size != otherFi.Size {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileSize,
								value: oneFi.Size,
								ford:  oneFi.Type,
							}
						}
						// Diff Mode.
						if oneFi.Mode != otherFi.Mode {
							q <- info{
								path:  args[i],
								index: i,
								full:  oneFi.Full,
								diff:  FileMode,
								value: oneFi.Mode,
								ford:  oneFi.Type,
							}
						}
					} else {
						q <- info{
							path:  args[i],
							index: i,
							full:  oneFi.Full,
							diff:  FileFull,
							value: oneFi.Full,
							ford:  oneFi.Type,
						}
					}
				}
			}
		}(i, one)
	}

	// Async wait.
	go func() {
		wg.Wait()
		close(q)
	}()

	// Receive diff and store to array.
	for info := range q {
		cnt++
		if !silent {
			fmt.Fprintf(os.Stderr, "Count: %d\r", cnt)
		}
		key := info.full + fmt.Sprint(info.diff)
		if _, ok := csvMap[key]; ok {
			csvMap[key][info.index+3] = info.value
		} else {
			s := make([]string, len(args)+3)
			s[0] = info.full
			s[1] = info.ford
			s[2] = fmt.Sprint(info.diff)
			s[info.index+3] = info.value
			csvMap[key] = s
		}
	}

	if len(csvMap) == 0 {
		fmt.Println("There is no difference !")
		return
	}

	// Output to csv.
	os.MkdirAll(filepath.Dir(out), os.ModePerm)
	c, err := os.Create(out)
	if err != nil {
		log.Fatalln(err)
	}
	defer c.Close()
	var writer *csv.Writer
	if sjisOut {
		writer = csv.NewWriter(transform.NewWriter(c, japanese.ShiftJIS.NewEncoder()))
	} else {
		writer = csv.NewWriter(c)
	}
	writer.Comma = '\t'
	writer.UseCRLF = true

	// Write header.
	err = writer.Write(append(strings.Split(DiffHeader, "\t"), args...))
	if err != nil {
		log.Fatalln(err)
	}

	// map to array.
	var csvArray records
	for _, v := range csvMap {
		csvArray = append(csvArray, v)
	}

	// sort
	if sorts == "" {
		sorts = "0,2"
	}
	sort.Sort(csvArray)

	for _, v := range csvArray {
		err = writer.Write(v)
		if err != nil {
			log.Fatalln(err)
		}
	}
	writer.Flush()
	fmt.Printf("Write to [%s]. ([%d] row)\n", out, cnt)
}