// ReverseScan scans the datafile in reverse order using a custom separator and function.
// The provided function takes two params: the data, and whether we are at the end or not.
// This function locks the whole file until it finishes.
func (this *AOF) ReverseScan(sep []byte, fn func(data []byte, atEOF bool) bool) {
	this.Lock()
	defer this.Unlock()
	pos := int64(0)
	done := int64(0)
	data := []byte{}
	for {
		this.file.Seek(pos, 2) // seek relative to the end of the file
		tmp := make([]byte, len(sep))
		n, _ := this.file.Read(tmp)
		pos -= int64(len(sep))
		if n > 0 {
			done += int64(n)
			data = append(tmp, data...)
		}
		if bytes.Equal(sep, tmp) {
			if !fn(bytes.Trim(data, string(sep)), false) {
				break
			}
			data = []byte{}
		}
		if done >= this.size {
			fn(bytes.Trim(data, string(sep)), true)
			break
		}
	}
	data = []byte{}
}
// Scan scans the datafile using a custom separator and function.
// The provided function takes two params: the data, and whether we are at the end or not.
// This function locks the whole file until it finishes.
func (this *AOF) Scan(sep []byte, fn func(data []byte, atEOF bool) bool) {
	this.Lock()
	defer this.Unlock()
	this.file.Seek(0, 0)
	data := []byte{}
	for {
		tmp := make([]byte, len(sep))
		n, e := this.file.Read(tmp)
		if n > 0 {
			data = append(data, tmp[0:n]...)
		}
		if e != nil || n == 0 {
			if len(data) > 0 {
				fn(bytes.Trim(data, string(sep)), true)
			}
			break
		}
		if bytes.Equal(sep, tmp) {
			if !fn(bytes.Trim(data, string(sep)), false) {
				break
			}
			data = []byte{}
		}
	}
	data = []byte{}
}
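For comparison, here is a minimal, self-contained sketch of the same forward scan-by-separator idea using only the standard library (no AOF type); the file name data.aof and the "\r\n" separator are made up for illustration:

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"os"
)

// splitOn returns a bufio.SplitFunc that yields chunks delimited by sep.
func splitOn(sep []byte) bufio.SplitFunc {
	return func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := bytes.Index(data, sep); i >= 0 {
			return i + len(sep), data[:i], nil
		}
		if atEOF {
			return len(data), data, nil // final chunk without a trailing separator
		}
		return 0, nil, nil // request more data
	}
}

func main() {
	f, err := os.Open("data.aof") // hypothetical file name
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	sc.Split(splitOn([]byte("\r\n")))
	for sc.Scan() {
		fmt.Printf("record: %q\n", sc.Bytes())
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}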
func WiteToTSDB(channel chan []byte, cfg *Config) {
START:
	tcpAddr, err := net.ResolveTCPAddr("tcp4", cfg.OPENTSDB_ADDR)
	if err != nil {
		panic(fmt.Sprintf("error opentsdb address(%s)", cfg.OPENTSDB_ADDR))
	}
	conn, err := net.DialTCP("tcp4", nil, tcpAddr)
	if err != nil {
		slog.Error("connect tsdb server(%s) error(%s)", tcpAddr, err)
		time.Sleep(time.Second * 30)
		goto START
	}
	slog.Info("connected to opentsdb server %s", cfg.OPENTSDB_ADDR)
	conn.SetKeepAlive(true)
	defer conn.Close()
	for {
		select {
		case data, ok := <-channel:
			if ok {
				data = append(data, '\n')
				length, err := conn.Write(data)
				if err != nil {
					slog.Error("write opentsdb error %s", err)
					channel <- bytes.Trim(data, "\n")
					time.Sleep(time.Second * 30)
					goto START
				}
				dlog.Info("write opentsdb %d bytes, data:(%s)", length, string(bytes.Trim(data, "\n")))
			}
		}
	}
}
func parseMeta(buf *bytes.Buffer, repo string) {
	var name string
	pack := map[string]string{}
	pack["REPO"] = repo
	_, _ = buf.ReadByte()
	for {
		key, err := buf.ReadBytes('%')
		if err == io.EOF {
			break
		}
		key = bytes.Trim(key, "%")
		values, _ := buf.ReadBytes('%')
		values = bytes.Trim(values, "%")
		values = bytes.Replace(values, []byte("\n"), []byte(" "), -1)
		values = bytes.Trim(values, " ")
		if string(key) == "NAME" {
			name = string(values)
		}
		pack[string(key)] = string(values)
	}
	v, _ := packages[name]
	if v != nil {
		printf("%s exists\n", name)
	}
	packages[name] = pack
}
func ProtrackerParse(rawData *bytes.Buffer) (samples []Sample) {
	modTitle := string(bytes.Trim(rawData.Next(20), zerostring))
	for i := 0; i < 31; i++ {
		sampleTitle := modTitle + " - " + string(bytes.Trim(rawData.Next(22), zerostring))
		sampleLength := BigEndianBytesToInt(rawData.Next(2)) * 2
		if sampleLength >= uint16(2) && len(sampleTitle) > 0 {
			samples = append(samples, Sample{
				Title:  sampleTitle,
				Length: int(sampleLength),
			})
		}
		// discard finetune (1 byte), volume (1 byte), repeat info (4 bytes)
		_ = rawData.Next(6)
	}
	songLength := rawData.Next(2)[0] // discard unused 127 byte
	patternOrder := rawData.Next(133)[:songLength]
	// discard pattern data
	for i := 0; i < int(biggest(patternOrder))+1; i++ { // patterns start at 00, so add 1
		_ = rawData.Next(1024)
	}
	for i, s := range samples {
		samples[i].Data = rawData.Next(s.Length)
	}
	fmt.Println("Title:", modTitle, "Samples:", len(samples))
	return
}
// checkVersionMatch makes sure that the go command in the path matches
// the GOROOT that will be used for building the cross compiler.
//
// This is typically not a problem when using a release version, but
// it is easy for development environments to drift, causing unexpected
// errors.
//
// checkVersionMatch is run after the tmpGoroot is built, so the dist
// command is available to call.
func checkVersionMatch(tmpGoroot string, version []byte) error {
	if buildN {
		return nil
	}
	version = bytes.TrimPrefix(version, []byte("go version "))
	version = bytes.Trim(version, "\n")

	dist := filepath.Join(tmpGoroot, "pkg/tool/"+goEnv("GOOS")+"_"+goEnv("GOARCH")+"/dist")
	if goos == "windows" {
		dist += ".exe"
	}
	cmd := exec.Command(dist, "version")
	cmd.Dir = tmpGoroot
	cmd.Env = []string{
		"GOROOT=" + tmpGoroot,
		`PATH=` + os.Getenv("PATH"),
	}
	cmd.Env = appendCommonEnv(cmd.Env)
	out, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("cannot get cmd/dist version: %v (%s)", err, out)
	}
	out = bytes.Trim(out, "\n")

	if !bytes.HasPrefix(version, out) {
		return fmt.Errorf("Go command out of sync with GOROOT. The command `go version` reports:\n\t%s\nbut the GOROOT %q is version:\n\t%s\nRebuild Go.", version, goEnv("GOROOT"), out)
	}
	return nil
}
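As a quick standalone illustration of the same normalization step (stripping the "go version " prefix and the trailing newline before comparing), here is a hedged sketch using only os/exec; it is not part of the original tool:

package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

func main() {
	out, err := exec.Command("go", "version").CombinedOutput()
	if err != nil {
		fmt.Println("cannot run go version:", err)
		return
	}
	// Trim the "go version " prefix and trailing newline, as checkVersionMatch does.
	v := bytes.TrimPrefix(out, []byte("go version "))
	v = bytes.Trim(v, "\n")
	fmt.Printf("normalized version string: %q\n", v)
}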
func readConfig() (cfg, error) {
	var config cfg
	cmd := exec.Command("git", "config", "erickson.url")
	output, err := cmd.Output()
	if err != nil {
		if _, ok := err.(*exec.ExitError); ok {
			return cfg{}, errNoConfig
		} else {
			return cfg{}, err
		}
	}
	config.url = string(bytes.Trim(output, "\n"))

	cmd = exec.Command("git", "config", "erickson.username")
	output, err = cmd.Output()
	if err != nil {
		if _, ok := err.(*exec.ExitError); ok {
			// the config option doesn't exist, that's ok
			return config, nil
		} else {
			return cfg{}, err
		}
	}
	config.username = string(bytes.Trim(output, "\n"))
	return config, nil
}
func (p *parser) parseLoop() {
	if p.tokens[p.position+1].id != TOKEN_OPEN_BRACE {
		panic("Syntax error: Expected opening brace after loop declaration")
	}

	// get the arguments for the loop
	keyInCollection := bytes.Split(p.currentToken().match, []byte(" in "))
	key := string(bytes.Trim(keyInCollection[0], " "))
	collection := collectionForKey(p.args, bytes.Trim(keyInCollection[1], " "))

	// get the scope
	p.position += 2 // move past the opening brace
	scopeParser := p.getScope()
	p.position += len(scopeParser.tokens) + 1 // +1 for closing brace

	// iterate through the collection and make a recursive call for each object
	// in the collection, keeping the same scope
	objects := make([][]byte, collection.Len())
	for i := 0; i < collection.Len(); i++ {
		// reset the fields of the scope parser and set the new loop variable
		scopeParser.args[key] = collection.Get(i)
		scopeParser.position = 0
		scopeParser.result = []byte{}
		scopeParser.parse()
		objects[i] = scopeParser.result
	}

	// add the resulting array to the result
	p.appendArray(objects)

	// remove the loop variable from args
	delete(scopeParser.args, key)
}
func DetectAtxHeader(first, second Line, detectors Detectors) Handler {
	if !bytes.HasPrefix(first.Bytes, []byte("#")) {
		return nil
	}
	done := false
	return HandlerFunc(func(line Line, ctx Context) (bool, error) {
		if done {
			return false, nil
		}
		done = true
		block := md.AtxHeaderBlock{
			Raw: md.Raw{md.Run(line)},
		}
		text := bytes.TrimRight(line.Bytes, "\n")
		text = bytes.Trim(text, "#")
		if len(text) > 0 {
			block.Level, _ = mdutils.OffsetIn(line.Bytes, text)
		} else {
			block.Level = len(bytes.TrimRight(line.Bytes, "\n"))
		}
		if block.Level > 6 {
			block.Level = 6
		}
		spanRegion := md.Raw{md.Run{
			Line:  line.Line,
			Bytes: bytes.Trim(text, mdutils.Whites),
		}}
		ctx.Emit(block)
		parseSpans(spanRegion, ctx)
		ctx.Emit(md.End{})
		return true, nil
	})
}
func (p *parser) parseInclude() {
	statement := p.currentToken().match
	params := bytes.Split(statement[8:len(statement)-1], []byte{','}) // strip away include() and split by comma
	templateName := p.workingDir + string(bytes.Trim(params[0], " "))
	template, err := ioutil.ReadFile(templateName + ".goson")
	// probably cannot find the template file
	if err != nil {
		panic(err)
	}
	lastPathSegmentStart := strings.LastIndex(templateName, "/")
	var workingDir string
	if lastPathSegmentStart >= 0 {
		workingDir = templateName[0 : lastPathSegmentStart+1]
	}
	tokens := Tokenize(template)
	args := explodeIntoArgs(objectForKey(p.args, bytes.Trim(params[1], " ")))
	includeParser := &parser{workingDir: workingDir, tokens: tokens, args: args}
	includeParser.parse()
	p.appendJson(includeParser.result)
	p.position++
}
func TestBundleWithECDSAKeyMarshalJSON(t *testing.T) {
	b := newCustomizedBundlerFromFile(t, testCFSSLRootBundle, testCFSSLIntBundle, "")
	bundle, _ := b.BundleFromFile(leafECDSA256, leafKeyECDSA256, Optimal, "")
	jsonBytes, err := json.Marshal(bundle)
	if err != nil {
		t.Fatal(err)
	}

	var obj map[string]interface{}
	err = json.Unmarshal(jsonBytes, &obj)
	if err != nil {
		t.Fatal(err)
	}

	key := obj["key"].(string)
	keyBytes, _ := ioutil.ReadFile(leafKeyECDSA256)
	keyBytes = bytes.Trim(keyBytes, " \n")
	if key != string(keyBytes) {
		t.Fatal("key is not recovered.")
	}

	cert := obj["crt"].(string)
	certBytes, _ := ioutil.ReadFile(leafECDSA256)
	certBytes = bytes.Trim(certBytes, " \n")
	if cert != string(certBytes) {
		t.Fatal("cert is not recovered.")
	}

	keyType := obj["key_type"]
	if keyType != "256-bit ECDSA" {
		t.Fatal("Incorrect key type:", keyType)
	}
}
func ReadSequence(file string) {
	f, err := os.Open(file)
	if err != nil {
		panic(err)
	}
	defer f.Close()

	if file[len(file)-6:] == ".fasta" {
		scanner := bufio.NewScanner(f)
		byte_array := make([]byte, 0)
		for scanner.Scan() {
			line := scanner.Bytes()
			if len(line) > 0 && line[0] != '>' {
				byte_array = append(byte_array, bytes.Trim(line, "\n\r ")...)
			}
		}
		SEQ = append(byte_array, byte('$'))
	} else {
		byte_array, err := ioutil.ReadFile(file)
		if err != nil {
			panic(err)
		}
		SEQ = append(bytes.Trim(byte_array, "\n\r "), byte('$'))
	}
}
func instrumentFromRaw(raw *rawInstrument) *Instrument {
	return &Instrument{
		Filename:             string(bytes.Trim(raw.DOSFilename[:], "\x00")),
		NewNoteAction:        NewNoteAction(raw.NNA),
		DuplicateCheckType:   DuplicateCheckType(raw.DCT),
		DuplicateCheckAction: DuplicateCheckAction(raw.DCA),
		FadeOut:              raw.FadeOut,
		PitchPanSeparation:   raw.PPS,
		PitchPanCenter:       raw.PPC,
		GlobalVolume:         raw.GbV,
		DefaultPan:           raw.DfP & 0x7f,
		DefaultPanOn:         raw.DfP&0x80 == 0,
		VolumeSwing:          raw.RV,
		PanSwing:             raw.RP,
		NumSamples:           raw.NoS,
		Name:                 string(bytes.Trim(raw.Name[:], "\x00")),
		DefaultCutoff:        raw.IFC,
		DefaultResonance:     raw.IFR,
		MIDIChannel:          raw.MCh,
		MIDIProgram:          raw.MPr,
		MIDIBankLow:          int8(raw.MIDIBnk[0]),
		MIDIBankHigh:         int8(raw.MIDIBnk[1]),
		KeyboardTable:        raw.KeyboardTable,
		VolumeEnvelope:       envelopeFromRaw(&raw.VolumeEnvelope),
		PanningEnvelope:      envelopeFromRaw(&raw.PanningEnvelope),
		PitchEnvelope:        envelopeFromRaw(&raw.PitchEnvelope),
	}
}
// NewPageOfMarkdown creates a new page from a markdown file.
func NewPageOfMarkdown(file, slug string, page *Page) (*Page, error) {
	// page-node need not read file
	if page != nil && page.Node == true {
		return page, nil
	}
	fileBytes, err := ioutil.ReadFile(file)
	if err != nil {
		return nil, err
	}
	if len(fileBytes) < 3 {
		return nil, fmt.Errorf("page content is too less")
	}

	if page == nil {
		dataSlice := bytes.SplitN(fileBytes, postBlockSeparator, 3)
		if len(dataSlice) != 3 {
			return nil, fmt.Errorf("page need front-matter block and markdown block")
		}
		idx := getFirstBreakByte(dataSlice[1])
		if idx == 0 {
			return nil, fmt.Errorf("page need front-matter block and markdown block")
		}

		formatType := detectFormat(string(dataSlice[1][:idx]))
		if formatType == 0 {
			return nil, fmt.Errorf("page front-matter block is unrecognized")
		}

		page = new(Page)
		if formatType == FormatTOML {
			if err = toml.Unmarshal(dataSlice[1][idx:], page); err != nil {
				return nil, err
			}
		}
		if formatType == FormatINI {
			iniObj, err := ini.Load(dataSlice[1][idx:])
			if err != nil {
				return nil, err
			}
			if err = newPageFromIniObject(iniObj, page, "DEFAULT", "meta"); err != nil {
				return nil, err
			}
		}
		if page.Node == false {
			page.Bytes = bytes.Trim(dataSlice[2], "\n")
		}
	} else {
		page.Bytes = bytes.Trim(fileBytes, "\n")
	}

	page.fileURL = file
	if page.Slug == "" {
		page.Slug = slug
	}
	if page.Date == "" && page.Node == false { // page-node need not time
		t, _ := com.FileMTime(file)
		page.dateTime = time.Unix(t, 0)
	}
	return page, page.normalize()
}
func client(c net.Conn) {
	// Close the connection when this function returns.
	defer c.Close()

	br := bufio.NewReader(c)
	fmt.Fprintf(c, "Please enter your name: ")
	buf, err := br.ReadBytes('\n')
	if err != nil {
		error_(err, -1)
		return
	}
	name := string(bytes.Trim(buf, " \t\n\r\x00"))
	if name == "" {
		fmt.Fprintf(c, "!!! %v is invalid !!!\n", name)
	}

	// Try to add the connection to the map.
	if !clients.Add(name, c) {
		fmt.Fprintf(c, "!!! %v is not available !!!\n", name)
		return
	}

	// Send a message telling the clients who connected.
	fmt.Fprintf(clients, "+++ %v connected +++\n", name)
	// Send a disconnected message when the function returns.
	defer fmt.Fprintf(clients, "--- %v disconnected ---\n", name)
	// Remove the client from the list.
	defer delete(clients, name)

	for {
		buf, err = br.ReadBytes('\n')
		if err != nil {
			break
		}
		buf = bytes.Trim(buf, " \t\n\r\x00")
		// Ignore empty messages.
		if len(buf) == 0 {
			continue
		}
		switch {
		// Support for '/me' type messages.
		case string(buf[0:3]) == "/me":
			buf = append([]byte(name), buf[3:]...)
		default:
			// Prepend the user-name and '> '.
			buf = append([]byte(name+"> "), buf...)
		}
		// Send the message to all the clients.
		fmt.Fprintf(clients, "%v\n", string(buf))
	}
}
// GetUsbStrings returns the device information. Note, strings may be empty.
func (dev *Context) GetUsbStrings() (manufact, product, serial string, err error) {
	m := make([]byte, 257) // includes space for NULL byte
	p := make([]byte, 257)
	s := make([]byte, 257)
	i := int(C.rtlsdr_get_usb_strings((*C.rtlsdr_dev_t)(dev),
		(*C.char)(unsafe.Pointer(&m[0])),
		(*C.char)(unsafe.Pointer(&p[0])),
		(*C.char)(unsafe.Pointer(&s[0]))))
	return string(bytes.Trim(m, "\x00")),
		string(bytes.Trim(p, "\x00")),
		string(bytes.Trim(s, "\x00")),
		libError(i)
}
// GetDeviceUsbStrings returns the information of a device by index.
func GetDeviceUsbStrings(index int) (manufact, product, serial string, err error) {
	m := make([]byte, 257) // includes space for NULL byte
	p := make([]byte, 257)
	s := make([]byte, 257)
	i := int(C.rtlsdr_get_device_usb_strings(C.uint32_t(index),
		(*C.char)(unsafe.Pointer(&m[0])),
		(*C.char)(unsafe.Pointer(&p[0])),
		(*C.char)(unsafe.Pointer(&s[0]))))
	return string(bytes.Trim(m, "\x00")),
		string(bytes.Trim(p, "\x00")),
		string(bytes.Trim(s, "\x00")),
		libError(i)
}
func save_file(title string, content []byte) {
	title = string(bytes.Trim([]byte(title), "\x00"))
	content = bytes.Trim(content, "\x00")
	file, err := os.Create(title)
	if err != nil {
		fmt.Println("Error during file creation -> ", err)
		return // don't try to write to a nil file handle
	}
	defer file.Close()
	_, err = io.WriteString(file, string(content))
	if err != nil {
		fmt.Println("Error during writing -> ", err)
	}
}
// NewPostOfMarkdown creates a new post from a markdown file.
func NewPostOfMarkdown(file string, post *Post) (*Post, error) {
	fileBytes, err := ioutil.ReadFile(file)
	if err != nil {
		return nil, err
	}
	if len(fileBytes) < 3 {
		return nil, fmt.Errorf("post content is too less")
	}

	if post == nil {
		dataSlice := bytes.SplitN(fileBytes, postBlockSeparator, 3)
		if len(dataSlice) != 3 {
			return nil, fmt.Errorf("post need front-matter block and markdown block")
		}
		idx := getFirstBreakByte(dataSlice[1])
		if idx == 0 {
			return nil, fmt.Errorf("post need front-matter block and markdown block")
		}

		formatType := detectFormat(string(dataSlice[1][:idx]))
		if formatType == 0 {
			return nil, fmt.Errorf("post front-matter block is unrecognized")
		}

		post = new(Post)
		if formatType == FormatTOML {
			if err = toml.Unmarshal(dataSlice[1][idx:], post); err != nil {
				return nil, err
			}
		}
		if formatType == FormatINI {
			iniObj, err := ini.Load(dataSlice[1][idx:])
			if err != nil {
				return nil, err
			}
			section := iniObj.Section("DEFAULT")
			if err = newPostFromIniSection(section, post); err != nil {
				return nil, err
			}
		}
		post.Bytes = bytes.Trim(dataSlice[2], "\n")
	} else {
		post.Bytes = bytes.Trim(fileBytes, "\n")
	}

	post.fileURL = file
	if post.Date == "" {
		t, _ := com.FileMTime(file)
		post.dateTime = time.Unix(t, 0)
	}
	return post, post.normalize()
}
func TestRestoreDump(t *testing.T) {
	b1, err := ioutil.ReadFile(path.Join(TestDir, "data1.json")) // with validators
	if err != nil {
		t.Fatal(err)
	}
	b2, err := ioutil.ReadFile(path.Join(TestDir, "data2.json")) // without
	if err != nil {
		t.Fatal(err)
	}
	b1 = bytes.Trim(b1, "\n")
	b2 = bytes.Trim(b2, "\n")

	// restore to a memdir
	config.Set("db_backend", "memdb")
	cfg.ApplyConfig(config) // Notify modules of new config
	CoreRestore("", b1)

	stateDB := dbm.GetDB("state")
	st := sm.LoadState(stateDB)
	acc := st.GetAccount(ptypes.GlobalPermissionsAddress)
	fmt.Println(acc)

	dump1 := CoreDump(true) // with validators
	if bytes.Compare(b1, dump1) != 0 {
		ld, lb := len(dump1), len(b1)
		max := int(math.Max(float64(ld), float64(lb)))
		n := 100
		for i := 0; i < max/n; i++ {
			dd := dump1[i*n : (i+1)*n]
			bb := b1[i*n : (i+1)*n]
			if bytes.Compare(dd, bb) != 0 {
				t.Fatalf("Error in dumps! Got \n\n\n\n %s \n\n\n\n Expected \n\n\n\n %s", dd, bb)
			}
		}
	}

	CoreRestore("", b2)
	dump2 := CoreDump(false) // without validators
	if bytes.Compare(b2, dump2) != 0 {
		ld, lb := len(dump2), len(b2)
		max := int(math.Max(float64(ld), float64(lb)))
		n := 100
		for i := 0; i < max/n; i++ {
			dd := dump2[i*n : (i+1)*n]
			bb := b2[i*n : (i+1)*n]
			if bytes.Compare(dd, bb) != 0 {
				t.Fatalf("Error in dumps! Got \n\n\n\n %s \n\n\n\n Expected \n\n\n\n %s", dd, bb)
			}
		}
	}
}
func (dsl *DeviceStateLocation) String() string {
	if dsl == nil {
		return "<*lifxpayloads.DeviceStateLocation(nil)>"
	}

	loc := string(bytes.Trim(dsl.Location[0:], "\x00"))
	label := string(bytes.Trim(dsl.Label[0:], "\x00"))

	return fmt.Sprintf(
		"<*lifxpayloads.DeviceStateLocation(%p): Location: \"%s\", Label: \"%s\", UpdatedAt: %d>",
		dsl, loc, label, dsl.UpdatedAt,
	)
}
func (dsg *DeviceStateGroup) String() string {
	if dsg == nil {
		return "<*lifxpayloads.DeviceStateGroup(nil)>"
	}

	group := string(bytes.Trim(dsg.Group[0:], "\x00"))
	label := string(bytes.Trim(dsg.Label[0:], "\x00"))

	return fmt.Sprintf(
		"<*lifxpayloads.DeviceStateGroup(%p): Group: \"%s\", Label: \"%s\", UpdatedAt: %d>",
		dsg, group, label, dsg.UpdatedAt,
	)
}
// DecodeFile decodes the drum machine file found at the provided path
// and returns a pointer to a parsed pattern which is the entry point to the
// rest of the data.
func DecodeFile(path string) (*Pattern, error) {
	p := &Pattern{}
	raw, err := ioutil.ReadFile(path)
	if err != nil {
		return p, errors.New("open: error reading file")
	}
	fileBuf := bytes.NewBuffer(raw)
	if bytes.Equal(fileBuf.Next(13), header) { // Check file for a valid header
		var msgLength uint8
		binary.Read(bytes.NewBuffer(fileBuf.Next(1)), binary.LittleEndian, &msgLength) // Get length of message (to pass testcase 5)
		if err != nil {
			return p, errors.New("decode: unable to decode message length")
		}
		msgBuf := bytes.NewBuffer(fileBuf.Next(int(msgLength))) // Create new buffer with the message
		fileBuf.Reset()                                         // Clear the buffer that we're not using anymore
		p.Version = string(bytes.Trim(msgBuf.Next(32), "\x00"))
		p.Tempo = btof(msgBuf.Next(4))
		for msgBuf.Len() > 0 {
			id, err := btoi(msgBuf.Next(1))
			if err != nil {
				return p, errors.New("decode: unable to decode id")
			}
			nameLength, err := btoi(bytes.Trim(msgBuf.Next(4), "\x00"))
			if err != nil {
				return p, errors.New("decode: unable to decode name length")
			}
			name := string(msgBuf.Next(nameLength))
			rhythm := msgBuf.Next(16)
			t := Track{id, name, rhythm}
			p.Tracks = append(p.Tracks, t)
		}
	} else {
		return p, errors.New("decode: missing 'SPLICE' header")
	}
	return p, nil
}
func getStringValue(value interface{}, output Argument) (string, error) {
	typ := output.Type
	if typ.IsSlice || typ.IsArray {
		if typ.T == BytesTy || typ.T == FixedBytesTy {
			return string(bytes.Trim(value.([]byte), "\x00")[:]), nil
		}
		var val []string
		if typ.Elem.T == FixedBytesTy {
			byteVals := reflect.ValueOf(value)
			for i := 0; i < byteVals.Len(); i++ {
				val = append(val, string(bytes.Trim(byteVals.Index(i).Interface().([]byte), "\x00")[:]))
			}
		} else {
			val = strings.Split(fmt.Sprintf("%v", value), " ")
		}
		StringVal := strings.Join(val, ",")
		if typ.Elem.T == FixedBytesTy {
			StringVal = strings.Join([]string{"[", StringVal, "]"}, "")
		}
		return StringVal, nil
	} else {
		switch typ.T {
		case IntTy:
			switch typ.Size {
			case 8, 16, 32, 64:
				return fmt.Sprintf("%v", value), nil
			default:
				return common.S256(value.(*big.Int)).String(), nil
			}
		case UintTy:
			switch typ.Size {
			case 8, 16, 32, 64:
				return fmt.Sprintf("%v", value), nil
			default:
				return common.U256(value.(*big.Int)).String(), nil
			}
		case BoolTy:
			return strconv.FormatBool(value.(bool)), nil
		case StringTy:
			return value.(string), nil
		case AddressTy:
			return strings.ToUpper(Bytes2Hex(value.(Address).Bytes())), nil
		default:
			return "", fmt.Errorf("Could not unpack value %v", value)
		}
	}
}
// MarshalJSON overrides the default Marshal method for Utmp.
//
// It correctly interprets the address field and byte arrays into
// properly formatted strings stripped of empty padding.
func (u Utmp) MarshalJSON() ([]byte, error) {
	utmp := map[string]interface{}{}
	utmp["type"] = u.Type
	utmp["pid"] = u.Pid
	utmp["device"] = string(bytes.Trim(u.Device[:], "\u0000"))
	utmp["id"] = string(bytes.Trim(u.Id[:], "\u0000"))
	utmp["user"] = string(bytes.Trim(u.User[:], "\u0000"))
	utmp["host"] = string(bytes.Trim(u.Host[:], "\u0000"))
	utmp["exit"] = u.Exit
	utmp["session"] = u.Session
	utmp["time"] = u.Time
	utmp["address"] = AddrToString(u.Addr)
	return json.Marshal(utmp)
}
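The NUL-trimming pattern used above (and in several of the other examples) can be shown in isolation with a small self-contained sketch; the record type below is invented for illustration and is not the real Utmp layout:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// record mimics a fixed-width, NUL-padded C struct layout (illustrative only).
type record struct {
	User [32]byte
	Host [64]byte
}

// MarshalJSON strips the NUL padding before encoding the fields as strings.
func (r record) MarshalJSON() ([]byte, error) {
	return json.Marshal(map[string]string{
		"user": string(bytes.Trim(r.User[:], "\x00")),
		"host": string(bytes.Trim(r.Host[:], "\x00")),
	})
}

func main() {
	var r record
	copy(r.User[:], "alice")
	copy(r.Host[:], "example.org")
	b, _ := json.Marshal(r)
	fmt.Println(string(b)) // {"host":"example.org","user":"alice"}
}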
func (tr *Reader) octal(b []byte) int64 {
	// Check for binary format first.
	if len(b) > 0 && b[0]&0x80 != 0 {
		var x int64
		for i, c := range b {
			if i == 0 {
				c &= 0x7f // ignore signal bit in first byte
			}
			x = x<<8 | int64(c)
		}
		return x
	}

	// Because unused fields are filled with NULs, we need
	// to skip leading NULs. Fields may also be padded with
	// spaces or NULs.
	// So we remove leading and trailing NULs and spaces to
	// be sure.
	b = bytes.Trim(b, " \x00")

	if len(b) == 0 {
		return 0
	}
	x, err := strconv.ParseUint(cString(b), 8, 64)
	if err != nil {
		tr.err = err
	}
	return int64(x)
}
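As a worked illustration of the plain-octal branch above, the following standalone sketch trims the NUL/space padding and parses the remainder in base 8; the sample field value is made up, and the cString helper from the original is replaced with a plain string conversion:

package main

import (
	"bytes"
	"fmt"
	"strconv"
)

// parseOctalField mirrors the plain-octal logic above (no base-256 handling).
func parseOctalField(b []byte) (int64, error) {
	b = bytes.Trim(b, " \x00") // strip NUL/space padding
	if len(b) == 0 {
		return 0, nil
	}
	x, err := strconv.ParseUint(string(b), 8, 64)
	return int64(x), err
}

func main() {
	// "0000644" padded with a trailing space and NUL, as in a tar header mode field.
	field := []byte("0000644 \x00")
	n, err := parseOctalField(field)
	fmt.Println(n, err) // 420 <nil> (0644 octal == 420 decimal)
}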
func connect(userID string, clientID string) {
	ws, err := websocket.Dial(getServer(), "", "http://localhost/")
	if err != nil {
		Debug(err.Error())
		return
	}

	message := []byte(fmt.Sprintf("%s:%s", userID, clientID))
	_, err = ws.Write(message)
	if err != nil {
		Debug(err.Error())
		return
	}
	Debug(fmt.Sprintf("Sent: %s\n", message))

	for {
		var msg = make([]byte, 512)
		_, err = ws.Read(msg)
		if err != nil {
			Debug(err.Error())
			return
		}
		Debug(fmt.Sprintf("Received: %s\n", msg))
		CmdChan <- string(bytes.Trim(msg, "\x00"))
	}
}
func main() {
	if len(os.Args) != 3 {
		fmt.Printf("Usage: %s <file.srt> <time offset>\n", os.Args[0])
		// TODO: potential crash if execve() was called without arguments in argv
		return
	}
	file, err := os.OpenFile(os.Args[1], os.O_RDWR, 0)
	if err != nil {
		fmt.Println(err.Error())
		return
	}
	defer file.Close()

	scanner := bufio.NewScanner(file) // by default, splits around '\n' characters
	var newFile bytes.Buffer
	offsetMilli, _ := strconv.Atoi(os.Args[2])
	for scanner.Scan() {
		processLine(scanner.Text(), &newFile, offsetMilli)
	}

	_, err = file.Seek(0, 0)
	if err != nil {
		fmt.Println(err)
		return
	}
	_, err = file.Write(bytes.Trim(newFile.Bytes(), "\x00"))
	if err != nil {
		fmt.Println(err)
		return
	}
}
// isDate detects if we see one of the following formats:
//	August 12, 2014
//	Aug 10, 2014 1:02 PM EDT
//	Sunday August 10 2014
//	Sunday, August 10, 2014 2:36 PM EDT
//	Monday, August 11, 2014 9:18:59 AM
//	Sat., Feb. 7, 2015 04:35 PM
//	Tue., Apr. 21, 2015 4:17 p.m.
func isDate(line []byte) bool {
	// Trim dots 'n periods
	line = bytes.Trim(line, "• .\u00a0")

	// check if it starts with a day or month
	dateStart := false
	for _, day := range daysOfWeek {
		if bytes.HasPrefix(line, day) {
			dateStart = true
			break
		}
	}
	if !dateStart {
		for _, day := range daysOfWeekShort {
			if bytes.HasPrefix(line, day) {
				dateStart = true
				break
			}
		}
	}
	if !dateStart {
		for _, month := range months {
			if bytes.HasPrefix(line, month) {
				dateStart = true
				break
			}
		}
	}
	if !dateStart {
		return false
	}

	// check if it ends with a timezone/daytime/year
	dateEnd := false
	for _, ap := range amPM {
		if bytes.HasSuffix(line, ap) {
			dateEnd = true
			break
		}
	}
	if !dateEnd {
		// newshound started in 2012. adjust if you want older data
		for i := 2012; i <= time.Now().Year(); i++ {
			if bytes.HasSuffix(line, []byte(strconv.Itoa(i))) {
				dateEnd = true
				break
			}
		}
	}
	if !dateEnd {
		for _, zone := range timezones {
			if bytes.HasSuffix(line, zone) {
				dateEnd = true
				break
			}
		}
	}
	return dateEnd
}
func findNews(text [][]byte, address, sender string) []byte {
	// prep the address for searching against text
	addr := []byte(address)
	addrStart := bytes.SplitN(addr, []byte("@"), 2)[0]
	// so we can reuse similar addresses?
	if len(addrStart) > 15 {
		addrStart = addrStart[:15]
	}

	var news [][]byte
	badLines := 0
	senderBytes := bytes.ToLower([]byte(sender))
	for _, line := range text {
		line := bytes.Trim(line, "-| ?")
		if isNews(line, addr, addrStart, senderBytes) {
			badLines = 0
			news = append(news, line)
		} else if (len(news) > 0) && (len(line) > 0) {
			badLines++
		}

		// get at most 3 or quit if we have bad rows and at least 2 or over 2 bads and 1 good
		if (len(news) >= 3) || ((badLines > 0) && (len(news) >= 2)) || ((badLines >= 2) && (len(news) >= 1)) {
			break
		}
	}

	return bytes.Join(news, []byte(" "))
}