// Read reads a single sequence and returns it or an error.
// TODO: Does not read multi-line fastq.
func (self *Reader) Read() (s seq.Sequence, err error) {
	var (
		buff, line, label []byte
		isPrefix          bool
		seqBuff           []alphabet.QLetter
		t                 seqio.SequenceAppender
	)

	inQual := false

	for {
		if buff, isPrefix, err = self.r.ReadLine(); err == nil {
			// Accumulate the full logical line; ReadLine returns long
			// lines in prefix chunks, which must not be discarded.
			line = append(line, buff...)
			if isPrefix {
				continue
			}

			line = bytes.TrimSpace(line)
			if len(line) == 0 {
				continue
			}
			switch {
			case !inQual && line[0] == '@':
				t = self.readHeader(line)
				label, line = line, nil
			case !inQual && line[0] == '+':
				if len(label) == 0 {
					return nil, bio.NewError("fastq: no header line parsed before +line in fastq format", 0)
				}
				if len(line) > 1 && !bytes.Equal(label[1:], line[1:]) {
					return nil, bio.NewError("fastq: quality header does not match sequence header", 0)
				}
				inQual = true
			case !inQual:
				line = bytes.Join(bytes.Fields(line), nil)
				seqBuff = make([]alphabet.QLetter, len(line))
				for i := range line {
					seqBuff[i].L = alphabet.Letter(line[i])
				}
			case inQual:
				line = bytes.Join(bytes.Fields(line), nil)
				if len(line) != len(seqBuff) {
					return nil, bio.NewError("fastq: sequence/quality length mismatch", 0)
				}
				for i := range line {
					seqBuff[i].Q = alphabet.DecodeToQphred(line[i], self.enc)
				}
				t.AppendQLetters(seqBuff...)

				return t, nil
			}
			line = nil
		} else {
			return
		}
	}

	panic("cannot reach")
}
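For reference, a minimal sketch of the input this reader accepts: an illustrative four-line FASTQ record (not taken from the package's tests; the '+' line may optionally repeat the header).

// Illustrative fixture only; sequence and quality lines are the same length.
var exampleFastq = []byte(`@SEQ_ID
GATTTGGGGTTCAAAGCAGTATCGATCAAATAGTAAATCCATTTGTTCAACTCACAGTTT
+SEQ_ID
!''*((((***+))%%%++)(%%%%).1***-+*''))**55CCF>>>>>>CCCCCCC65
`)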
func (auth *Auth) assertion() ([]byte, error) {
	header, err := json.Marshal(
		map[string]interface{}{
			"typ": "JWT",
			"alg": "RS256",
		})
	if err != nil {
		return nil, err
	}

	parts := [3][]byte{}
	parts[0] = b64urlencode(header)

	now := time.Now()
	claims, err := json.Marshal(
		map[string]interface{}{
			"iss":   auth.Email,
			"scope": strings.Join(auth.Scope, " "),
			"aud":   aud,
			"exp":   now.Add(time.Hour).Unix(),
			"iat":   now.Unix(),
		})
	if err != nil {
		return nil, err
	}
	parts[1] = b64urlencode(claims)

	sha := sha256.New()
	sha.Write(bytes.Join(parts[:2], separator))
	signature, err := rsa.SignPKCS1v15(rand.Reader, auth.Key, crypto.SHA256, sha.Sum(nil))
	if err != nil {
		return nil, err
	}
	parts[2] = b64urlencode(signature)

	return bytes.Join(parts[:], separator), nil
}
func toCamelCase(x string) string {
	if len(x) == 0 {
		return ""
	}
	output := make([]byte, 0, len(x))
	uppercase := true
	for len(x) > 0 {
		v, size := utf8.DecodeRuneInString(x)
		if v == '_' {
			// An underscore is dropped and starts a new word.
			uppercase = true
		} else if unicode.IsLetter(v) {
			if uppercase {
				uppercase = false
				v = unicode.ToUpper(v)
			}
			var buf [utf8.UTFMax]byte
			n := utf8.EncodeRune(buf[:], v)
			output = append(output, buf[:n]...)
		}
		x = x[size:]
	}
	return string(output)
}
// packPUBLISH packs a PUBLISH packet into its wire format.
func packPUBLISH(dup bool, qos int, retain bool, topic string, messageID uint16, payload []byte) []byte {
	var remaining []byte
	if qos == 0 {
		remaining = bytes.Join([][]byte{
			str_to_bytes(topic),
			payload,
		}, nil)
	} else {
		remaining = bytes.Join([][]byte{
			str_to_bytes(topic),
			[]byte{byte(messageID / 256), byte(messageID % 256)},
			payload,
		}, nil)
	}
	header := byte(PUBLISH*16 + qos*2)
	if dup {
		// The DUP flag is bit 3 of the fixed header; 0x80 would clobber
		// the message-type nibble.
		header |= 0x08
	}
	if retain {
		header |= 0x01
	}
	return bytes.Join([][]byte{
		[]byte{header},
		encodeRemainLength(len(remaining)),
		remaining,
	}, nil)
}
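encodeRemainLength is called above but not shown. A minimal sketch of MQTT's variable-length Remaining Length encoding (seven payload bits per byte, high bit as a continuation flag); the name and signature are assumed from the call site:

// encodeRemainLength encodes n using MQTT's variable-length scheme:
// each output byte carries seven bits of the value, and the high bit
// marks that more bytes follow.
func encodeRemainLength(n int) []byte {
	out := make([]byte, 0, 4)
	for {
		digit := byte(n % 128)
		n /= 128
		if n > 0 {
			digit |= 0x80
		}
		out = append(out, digit)
		if n == 0 {
			return out
		}
	}
}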
// This test validates errors returned when data blocks exceed size limits.
func TestReader_blockTooLarge(t *testing.T) {
	// the compressed chunk size is within the allowed encoding size
	// (maxEncodedBlockSize), but the uncompressed data is larger than allowed.
	badstream := bytes.Join([][]byte{
		streamID,
		compressedChunk(t, make([]byte, (1<<24)-5)),
	}, nil)

	r := NewReader(bytes.NewBuffer(badstream), true)
	p := make([]byte, 1)
	n, err := r.Read(p)
	if err == nil {
		t.Fatalf("read: expected error")
	}
	if n != 0 {
		t.Fatalf("read: read data from the stream")
	}

	// the uncompressed chunk size is within the allowed encoding size
	// (maxEncodedBlockSize), but its data is larger than allowed.
	badstream = bytes.Join([][]byte{
		streamID,
		uncompressedChunk(t, make([]byte, (1<<24)-5)),
	}, nil)

	r = NewReader(bytes.NewBuffer(badstream), true)
	p = make([]byte, 1)
	n, err = r.Read(p)
	if err == nil {
		t.Fatalf("read: expected error")
	}
	if n != 0 {
		t.Fatalf("read: read data from the stream")
	}
}
func (self encoder) encode(in interface{}) ([]byte, error) {
	switch v := in.(type) {
	case nil:
		return encoderNil, nil
	case string:
		out := bytes.Join([][]byte{{StringHeader}, []byte(v), endOfLine}, nil)
		return out, nil
	case error:
		out := bytes.Join([][]byte{{ErrorHeader}, []byte(v.Error()), endOfLine}, nil)
		return out, nil
	case int:
		out := bytes.Join([][]byte{{IntegerHeader}, []byte(strconv.Itoa(v)), endOfLine}, nil)
		return out, nil
	case []byte:
		out := bytes.Join([][]byte{{BulkHeader}, []byte(strconv.Itoa(len(v))), endOfLine, v, endOfLine}, nil)
		return out, nil
	case []interface{}:
		var buf bytes.Buffer
		buf.Write(bytes.Join([][]byte{{ArrayHeader}, []byte(strconv.Itoa(len(v))), endOfLine}, nil))
		for i := range v {
			chunk, err := self.encode(v[i])
			if err != nil {
				return nil, err
			}
			buf.Write(chunk)
		}
		return buf.Bytes(), nil
	}
	return nil, ErrInvalidInput
}
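This follows the RESP wire format. Assuming the conventional header bytes and terminator (an assumption, since the constants are defined elsewhere), a two-element array would encode as follows:

// Hypothetical usage, assuming StringHeader == '+', BulkHeader == '$',
// ArrayHeader == '*', and endOfLine == "\r\n" (RESP conventions):
//
//	out, _ := encoder{}.encode([]interface{}{"OK", []byte("hi")})
//	// out == []byte("*2\r\n+OK\r\n$2\r\nhi\r\n")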
func Encode(jwt interface{}, key []byte, algorithm string) ([]byte, error) {
	shaFunc, err := getHash(algorithm)
	if err != nil {
		return []byte{}, err
	}
	sha := hmac.New(shaFunc, key)

	segments := [3][]byte{}

	header, err := json.Marshal(
		map[string]interface{}{
			"typ": "JWT",
			"alg": algorithm,
		})
	if err != nil {
		return []byte{}, err
	}
	segments[0] = base64url_encode(header)

	claims, err := json.Marshal(jwt)
	if err != nil {
		return []byte{}, err
	}
	segments[1] = base64url_encode(claims)

	sha.Write(bytes.Join(segments[:2], separator))
	segments[2] = base64url_encode(sha.Sum(nil))

	return bytes.Join(segments[:], separator), nil
}
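A usage sketch; the "HS256" label is an assumption about what getHash accepts (shown here as the standard JWT name for HMAC-SHA256):

// Hypothetical usage:
//
//	token, err := Encode(map[string]interface{}{"sub": "1234"}, []byte("secret"), "HS256")
//	if err != nil {
//		// handle error
//	}
//	// token is header.claims.signature, each segment base64url-encoded.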
// TestReader_maxBlock validates bounds checking on encoded and decoded data
// (4.2 Compressed Data).
func TestReader_maxBlock(t *testing.T) {
	// decompressing a block whose compressed length is greater than
	// MaxBlockSize should succeed.
	buf := bytes.NewReader(bytes.Join([][]byte{
		streamID,
		compressedChunkGreaterN(t, MaxBlockSize),
	}, nil))
	r := NewReader(buf, true)
	b, err := ioutil.ReadAll(r)
	if err != nil {
		t.Fatal(err)
	}
	if len(b) != MaxBlockSize {
		t.Fatalf("bad read (%d bytes)", len(b))
	}

	// decompressing should fail if the block's decompressed length is
	// greater than MaxBlockSize.
	buf = bytes.NewReader(bytes.Join([][]byte{
		streamID,
		compressedChunk(t, make([]byte, MaxBlockSize+1)),
	}, nil))
	r = NewReader(buf, true)
	b, err = ioutil.ReadAll(r)
	if err == nil {
		t.Fatal("unexpected success")
	}
	if len(b) > 0 {
		t.Fatalf("unexpected read %q", b)
	}
}
func checkRequest(expected, got *RequestHeader) error {
	if expected == got {
		return nil
	}
	if expected == nil || got == nil {
		return fmt.Errorf("expected=%v, got=%v", expected, got)
	}
	if expected.BodyRead != got.BodyRead {
		return fmt.Errorf("BodyRead does not match, expected=%v, got=%v",
			expected.BodyRead, got.BodyRead)
	}
	if expected.BodySize != got.BodySize {
		return fmt.Errorf("BodySize does not match, expected=%v, got=%v",
			expected.BodySize, got.BodySize)
	}
	expectedReqLine := bytes.Join(expected.ReqLineTokens, []byte{})
	gotReqLine := bytes.Join(got.ReqLineTokens, []byte{})
	if !bytes.Equal(expectedReqLine, gotReqLine) {
		return fmt.Errorf("ReqLineTokens does not match, expected=%v, got=%v",
			string(expectedReqLine), string(gotReqLine))
	}
	if len(expected.Headers) != len(got.Headers) {
		return fmt.Errorf("Headers does not match, expected=%v, got=%v",
			expected.HeadersStr(), got.HeadersStr(),
		)
	}
	return nil
}
func TestDecodeResponse(t *testing.T) {
	inner := bytes.Join([][]byte{
		[]byte{0xA2}, // map(2)
		DeTestByteString("id").Data,
		[]byte{0x02},
		DeTestByteString("result").Data,
		[]byte{0x81}, // array(1)
		DeTestByteString("x").Data,
	}, []byte{})
	outer := bytes.Join([][]byte{
		[]byte{0xD8, 0x18, 0x58, byte(len(inner))}, // tag(24), byte string of len(inner)
		inner,
	}, []byte{})

	decoder := codec.NewDecoderBytes(outer, cbor)
	var actual Response
	expected := Response{
		ID:     2,
		Result: []interface{}{[]byte("x")},
		Error:  "",
	}
	err := decoder.Decode(&actual)
	if assert.NoError(t, err) {
		assert.Equal(t, expected, actual)
	}
}
func ExampleMultipleOutputsDoer() {
	multiDoer := MakeMultipleOutputsDoFunc(func(inputRecord *store.Record, outputChans ...chan *store.Record) {
		outputChans[0] <- &store.Record{
			Key:   inputRecord.Key,
			Value: bytes.Join([][]byte{inputRecord.Value, inputRecord.Value}, []byte(",")),
		}
		outputChans[1] <- &store.Record{
			Key:   inputRecord.Key,
			Value: bytes.Join([][]byte{inputRecord.Value, inputRecord.Value}, []byte(";")),
		}
	}, 2)

	inputChan := make(chan *store.Record, 3)
	inputChan <- store.NewRecord("a", "b", 0)
	inputChan <- store.NewRecord("c", "d", 0)
	inputChan <- store.NewRecord("e", "f", 0)
	close(inputChan)

	outputChan := make(chan *store.Record, 6)
	multiDoer.Do(inputChan, outputChan)
	close(outputChan)

	for record := range outputChan {
		fmt.Printf("[%d] %s: %s\n", record.DatabaseIndex, record.Key, record.Value)
	}
	// Output:
	// [0] a: b,b
	// [1] a: b;b
	// [0] c: d,d
	// [1] c: d;d
	// [0] e: f,f
	// [1] e: f;f
}
func ExampleDoer() {
	doer := MakeDoFunc(func(inputRecord *store.Record, outputChan chan *store.Record) {
		outputChan <- &store.Record{
			Key:   inputRecord.Key,
			Value: bytes.Join([][]byte{inputRecord.Value, inputRecord.Value}, []byte(",")),
		}
		outputChan <- &store.Record{
			Key:   inputRecord.Key,
			Value: bytes.Join([][]byte{inputRecord.Value, inputRecord.Value}, []byte(";")),
		}
	})

	inputChan := make(chan *store.Record, 3)
	inputChan <- store.NewRecord("a", "b", 0)
	inputChan <- store.NewRecord("c", "d", 0)
	inputChan <- store.NewRecord("e", "f", 0)
	close(inputChan)

	outputChan := make(chan *store.Record, 6)
	doer.Do(inputChan, outputChan)
	close(outputChan)

	for record := range outputChan {
		fmt.Printf("%s: %s\n", record.Key, record.Value)
	}
	// Output:
	// a: b,b
	// a: b;b
	// c: d,d
	// c: d;d
	// e: f,f
	// e: f;f
}
// MakeResult builds a result from a slice of grep.Match.
func MakeResult(path string, re *regexp.Regexp, grepMatches []grep.Match) Result {
	var matches []Match
	for _, m := range grepMatches {
		start := m.LineNum - len(m.ContextBefore)
		snippetBefore := string(bytes.Join(m.ContextBefore, []byte{'\n'}))
		if len(m.ContextBefore) > 0 {
			snippetBefore += "\n"
		}

		// Find the exact match on the matching line.
		i := re.FindIndex(m.FullLine)
		snippetBefore += string(m.FullLine[:i[0]])
		snippetMatch := string(m.FullLine[i[0]:i[1]])
		snippetAfter := string(m.FullLine[i[1]:])
		if len(m.ContextAfter) > 0 {
			snippetAfter += "\n" + string(bytes.Join(m.ContextAfter, []byte{'\n'}))
		}

		matches = append(matches, Match{
			Start:         start,
			SnippetBefore: snippetBefore,
			SnippetMatch:  snippetMatch,
			SnippetAfter:  snippetAfter,
		})
	}
	return Result{
		Path:    path,
		Matches: matches,
	}
}
// WrapBytes wraps b into a paragraph of lines of length lim, with minimal
// raggedness.
func WrapBytes(b []byte, lim int) []byte {
	words := bytes.Split(bytes.Replace(bytes.TrimSpace(b), nl, sp, -1), sp)
	var lines [][]byte
	for _, line := range WrapWords(words, 1, lim, defaultPenalty) {
		lines = append(lines, bytes.Join(line, sp))
	}
	return bytes.Join(lines, nl)
}
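A usage sketch, under the assumption that nl and sp are the package's newline and space byte slices:

// Hypothetical usage, assuming nl = []byte{'\n'} and sp = []byte{' '}:
//
//	wrapped := WrapBytes([]byte("the quick brown fox jumps over the lazy dog"), 16)
//	// wrapped holds the text reflowed into lines of at most 16 columns,
//	// broken to minimize raggedness rather than greedily.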
// Encode combines all received payloads into a JSON array.
func (q Queue) Encode() ([]byte, error) {
	vals := [][]byte{
		[]byte("["),
		bytes.Join(q, []byte(",")),
		[]byte("]"),
	}
	return bytes.Join(vals, nil), nil
}
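A usage sketch; Queue is inferred from the bytes.Join call to be a [][]byte of already-encoded JSON payloads (an assumption, since the type definition is elsewhere):

// Hypothetical usage:
//
//	q := Queue{[]byte(`{"a":1}`), []byte(`{"b":2}`)}
//	out, _ := q.Encode()
//	// out == []byte(`[{"a":1},{"b":2}]`)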
func (self *pieceHasher) Pieces() []byte {
	piecesData := bytes.Join(self.PieceHashes, []byte(""))
	if len(self.PieceBuffer) > 0 {
		hasher := sha1.New()
		hasher.Write(self.PieceBuffer)
		piecesData = bytes.Join([][]byte{piecesData, hasher.Sum(nil)}, []byte(""))
	}
	return piecesData
}
func (r *Reader) commentMetaline(line []byte) (f feat.Feature, err error) {
	fields := bytes.Split(line, []byte{' '})
	if len(fields) < 1 {
		return nil, &csv.ParseError{Line: r.line, Err: ErrEmptyMetaLine}
	}
	switch unsafeString(fields[0]) {
	case "gff-version":
		v := mustAtoi(fields, 1, r.line)
		if v > Version {
			return nil, &csv.ParseError{Line: r.line, Err: ErrNotHandled}
		}
		r.Version = Version
		return r.Read()
	case "source-version":
		if len(fields) <= 1 {
			return nil, &csv.ParseError{Line: r.line, Err: ErrBadMetaLine}
		}
		r.SourceVersion = string(bytes.Join(fields[1:], []byte{' '}))
		return r.Read()
	case "date":
		if len(fields) <= 1 {
			return nil, &csv.ParseError{Line: r.line, Err: ErrBadMetaLine}
		}
		if len(r.TimeFormat) > 0 {
			r.Date, err = time.Parse(r.TimeFormat, unsafeString(bytes.Join(fields[1:], []byte{' '})))
			if err != nil {
				return nil, err
			}
		}
		return r.Read()
	case "Type", "type":
		if len(fields) <= 1 {
			return nil, &csv.ParseError{Line: r.line, Err: ErrBadMetaLine}
		}
		r.Type = feat.ParseMoltype(unsafeString(fields[1]))
		if len(fields) > 2 {
			r.Name = string(fields[2])
		}
		return r.Read()
	case "sequence-region":
		if len(fields) <= 3 {
			return nil, &csv.ParseError{Line: r.line, Err: ErrBadMetaLine}
		}
		return &Region{
			Sequence:    Sequence{SeqName: string(fields[1]), Type: r.Type},
			RegionStart: feat.OneToZero(mustAtoi(fields, 2, r.line)),
			RegionEnd:   mustAtoi(fields, 3, r.line),
		}, nil
	case "DNA", "RNA", "Protein", "dna", "rna", "protein":
		if len(fields) <= 1 {
			return nil, &csv.ParseError{Line: r.line, Err: ErrBadMetaLine}
		}
		return r.metaSeq(fields[0], fields[1])
	default:
		return nil, &csv.ParseError{Line: r.line, Err: ErrNotHandled}
	}
}
func Read(r io.Reader) (ans []gene.Gene, err error) {
	var (
		f, t, i int
		c       byte
	)
	buf := bufio.NewReader(r)
	mdi := make([][]byte, 0)
	ans = make([]gene.Gene, 0, 10)
	for i = -1; ; {
		line, err := buf.ReadBytes('\n')
		if err != nil && err != io.EOF {
			return nil, err
		}
		line = bytes.TrimSpace(line)
		if len(line) != 0 {
			if line[0] == '>' {
				// Header line: flush the sequence accumulated for the
				// previous gene and start a new one.
				if i >= 0 {
					ans[i].Seq = bytes.Join(mdi, nil)
					mdi = mdi[0:0]
				}
				ans = append(ans, gene.Gene{gene.Sequence{string(line[1:]), nil}, nil})
				i++
				// The line following a header holds the segment ranges.
				line, err := buf.ReadBytes('\n')
				if err != nil {
					return nil, err
				}
				R := bytes.NewReader(line)
				for {
					_, err := fmt.Fscanf(R, "%d..%d%c", &f, &t, &c)
					if err != nil {
						return nil, err
					}
					ans[i].Seg = append(ans[i].Seg, [2]int{f - 1, t})
					if c == '\r' || c == '\n' {
						break
					}
					if c != ',' {
						return nil, fmt.Errorf("Range read error")
					}
				}
			} else {
				mdi = append(mdi, line)
			}
		}
		if err != nil {
			break
		}
	}
	ans[i].Seq = bytes.Join(mdi, nil)
	// Trim a trailing stop symbol from each sequence.
	for ; i >= 0; i-- {
		l := len(ans[i].Seq) - 1
		if ans[i].Seq[l] == '*' {
			ans[i].Seq = ans[i].Seq[:l]
		}
	}
	return ans, nil
}
package main

import (
	"bytes"
	"fmt"
)

func main() {
	a := [][]byte{
		[]byte("hello"),
		[]byte("world"),
	}
	fmt.Println(string(bytes.Join(a, []byte(", ")))) // hello, world
	fmt.Println(string(bytes.Join(a, []byte{})))     // helloworld
	fmt.Println(string(bytes.Join(a, nil)))          // helloworld
}
// Bytes returns the wire representation of the Message: SEP-separated
// fields terminated by EOM.
func (c *Message) Bytes() []byte {
	var b []byte
	if c.arg != nil {
		b = bytes.Join([][]byte{[]byte{c.cmd}, c.key, c.arg}, []byte{SEP})
	} else {
		b = bytes.Join([][]byte{[]byte{c.cmd}, c.key}, []byte{SEP})
	}
	return append(b, EOM)
}
func (m Message) Bytes() []byte {
	sd := m.StructuredData
	if len(sd) == 0 {
		sd = nilValue
	}
	if len(m.Msg) > 0 {
		return bytes.Join([][]byte{m.Header.Bytes(), sd, m.Msg}, msgSep)
	}
	return bytes.Join([][]byte{m.Header.Bytes(), sd}, msgSep)
}
func parseCommit(r io.Reader, resultSize string, name SHA) (Commit, error) {
	var commit = Commit{_type: "commit", size: resultSize}
	scnr := bufio.NewScanner(r)
	scnr.Split(ScanLinesNoTrim)

	var commitMessageLines [][]byte
	for scnr.Scan() {
		line := scnr.Bytes()
		trimmedLine := bytes.TrimRight(line, "\r\n")

		if commitMessageLines == nil && len(bytes.Fields(trimmedLine)) == 0 {
			// Everything after the first empty line is the commit message.
			commitMessageLines = [][]byte{}
			continue
		}
		if commitMessageLines != nil {
			// We have already seen an empty line.
			commitMessageLines = append(commitMessageLines, line)
			continue
		}

		parts := bytes.Fields(trimmedLine)
		key := parts[0]
		switch keyType(key) {
		case treeKey:
			commit.Tree = string(parts[1])
		case parentKey:
			commit.Parents = append(commit.Parents, SHA(string(parts[1])))
		case authorKey:
			authorline := string(bytes.Join(parts[1:], []byte(" ")))
			author, date, err := parseAuthorString(authorline)
			if err != nil {
				return commit, err
			}
			commit.Author = author
			commit.AuthorDate = date
		case committerKey:
			committerline := string(bytes.Join(parts[1:], []byte(" ")))
			committer, date, err := parseCommitterString(committerline)
			if err != nil {
				return commit, err
			}
			commit.Committer = committer
			commit.CommitterDate = date
		default:
			err := fmt.Errorf("encountered unknown field in commit: %s", key)
			return commit, err
		}
	}
	commit.Name = name
	commit.Message = bytes.Join(commitMessageLines, []byte("\n"))
	return commit, nil
}
func (p *AuthDbusCookieSha1) ProcessData(mesg []byte) ([]byte, error) {
	decodedLen, err := hex.Decode(mesg, mesg)
	if err != nil {
		return nil, err
	}
	mesgTokens := bytes.SplitN(mesg[:decodedLen], []byte(" "), 3)

	file, err := os.Open(os.Getenv("HOME") + "/.dbus-keyrings/" + string(mesgTokens[0]))
	if err != nil {
		return nil, err
	}
	defer file.Close()

	fileStream := bufio.NewReader(file)
	var cookie []byte
	for {
		line, _, err := fileStream.ReadLine()
		if err == io.EOF {
			return nil, errors.New("SHA1 Cookie not found")
		} else if err != nil {
			return nil, err
		}
		cookieTokens := bytes.SplitN(line, []byte(" "), 3)
		if bytes.Equal(cookieTokens[0], mesgTokens[1]) {
			cookie = cookieTokens[2]
			break
		}
	}

	challenge := make([]byte, len(mesgTokens[2]))
	if _, err = rand.Read(challenge); err != nil {
		return nil, err
	}
	// Re-roll any space or tab bytes in the random challenge, since it is
	// later sent as a space-separated token; the rewritten byte is checked
	// again on the next iteration.
	for temp := challenge; ; {
		if index := bytes.IndexAny(temp, " \t"); index == -1 {
			break
		} else if _, err := rand.Read(temp[index : index+1]); err != nil {
			return nil, err
		} else {
			temp = temp[index:]
		}
	}

	hash := sha1.New()
	if _, err := hash.Write(bytes.Join([][]byte{mesgTokens[2], challenge, cookie}, []byte(":"))); err != nil {
		return nil, err
	}

	resp := bytes.Join([][]byte{challenge, []byte(hex.EncodeToString(hash.Sum(nil)))}, []byte(" "))
	respHex := make([]byte, hex.EncodedLen(len(resp)))
	hex.Encode(respHex, resp)
	return append([]byte("DATA "), respHex...), nil
}
// Bytes returns the serialized RequestHeader.
func (r *RequestHeader) Bytes() []byte {
	lines := [][]byte{}
	lines = append(lines, bytes.Join(r.ReqLineTokens, []byte{' '}))
	for _, h := range r.Headers {
		hline := bytes.Join(h, []byte{':', ' '})
		lines = append(lines, hline)
	}
	out := bytes.Join(lines, eol)
	out = append(out, eoh...)
	return out
}
// Read reads a single sequence and returns it or an error.
// TODO: Does not read interleaved fastq.
func (self *Reader) Read() (sequence *seq.Seq, err error) {
	if self.r == nil {
		self.r = bufio.NewReader(self.f)
	}

	var line, label, seqBody, qualBody []byte
	sequence = &seq.Seq{}

	inQual := false
READ:
	for {
		if line, err = self.r.ReadBytes('\n'); err == nil {
			if len(line) > 0 && line[len(line)-1] == '\r' {
				line = line[:len(line)-1]
			}
			line = bytes.TrimSpace(line)
			if len(line) == 0 {
				continue
			}
			switch {
			case !inQual && line[0] == '@':
				label = line[1:]
			case !inQual && line[0] == '+':
				if len(label) == 0 {
					return nil, errors.New("No ID line parsed at +line in fastq format")
				}
				if len(line) > 1 && !bytes.Equal(label, line[1:]) {
					return nil, errors.New("Quality ID does not match sequence ID")
				}
				inQual = true
			case !inQual:
				line = bytes.Join(bytes.Fields(line), nil)
				seqBody = append(seqBody, line...)
			case inQual:
				line = bytes.Join(bytes.Fields(line), nil)
				qualBody = append(qualBody, line...)
				if len(qualBody) >= len(seqBody) {
					break READ
				}
			}
		} else {
			return
		}
	}

	if len(seqBody) != len(qualBody) {
		return nil, errors.New("Quality length does not match sequence length")
	}

	sequence = seq.New(label, seqBody, qualBody)

	return
}
func getSrpClientPublicBytes(clientPublic *big.Int) (bs []byte) {
	b := bytes.NewBufferString(hex.EncodeToString(bigToBytes(clientPublic))).Bytes()
	if len(b) > 254 {
		// Values longer than 254 bytes are split across two
		// CNCT_specific_data segments; each length byte also covers the
		// sequence-number byte that follows it.
		bs = bytes.Join([][]byte{
			[]byte{CNCT_specific_data, byte(255), 0},
			b[:254],
			[]byte{CNCT_specific_data, byte(len(b)-254) + 1, 1},
			b[254:],
		}, nil)
	} else {
		bs = bytes.Join([][]byte{
			[]byte{CNCT_specific_data, byte(len(b)) + 1, 0},
			b,
		}, nil)
	}
	return bs
}
// http://stackoverflow.com/questions/27602013/correct-way-to-get-region-name-by-using-hbase-api
func (c *client) createRegionName(table, startKey []byte, id string, newFormat bool) []byte {
	if len(startKey) == 0 {
		startKey = make([]byte, 1)
	}
	b := bytes.Join([][]byte{table, startKey, []byte(id)}, []byte(","))
	if newFormat {
		m := md5.Sum(b)
		mhex := []byte(hex.EncodeToString(m[:]))
		b = append(bytes.Join([][]byte{b, mhex}, []byte(".")), []byte(".")...)
	}
	return b
}
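For illustration, a self-contained sketch of the newFormat branch with hypothetical inputs; the resulting shape is <table>,<startKey>,<id>.<md5hex>. :

package main

import (
	"bytes"
	"crypto/md5"
	"encoding/hex"
	"fmt"
)

func main() {
	// Hypothetical inputs mirroring createRegionName's newFormat branch.
	b := bytes.Join([][]byte{[]byte("t1"), []byte("rowkey"), []byte("1234567890")}, []byte(","))
	m := md5.Sum(b)
	mhex := []byte(hex.EncodeToString(m[:]))
	b = append(bytes.Join([][]byte{b, mhex}, []byte(".")), []byte(".")...)
	fmt.Println(string(b)) // t1,rowkey,1234567890.<32 hex chars>.
}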
func (idx *SuffixIndex) sort() {
	if idx.index == nil {
		keys := make([][]byte, len(idx.tiles))
		i := 0
		for k := range idx.tiles {
			keys[i] = []byte(k)
			i++
		}
		d := []byte{zero}
		b := bytes.Join(keys, d)                    // join keys with zero separators
		idx.indexed = bytes.Join([][]byte{d, d}, b) // pad with a leading and trailing zero
		idx.index = suffixarray.New(idx.indexed)
	}
}
func SignatureSignScript(userId string, script []byte) ([]byte, error) {
	// Make script with headers.
	userInfoJS, err := getUserAttribute(userId, "user_info")
	if err != nil {
		return nil, err
	}
	userInfo := map[string]string{}
	err = json.Unmarshal([]byte(userInfoJS), &userInfo)
	if err != nil {
		return nil, err
	}
	userProfileLink := userInfo["link"]
	if userProfileLink == "" {
		return nil, errors.New("Can't get user link")
	}

	parts := [][]byte{}
	parts = append(parts, []byte("<!--"))
	parts = append(parts, []byte(userProfileLink+"-->\n"))
	parts = append(parts, script)
	script = bytes.Join(parts, []byte(""))

	// Sign script with header.
	// TODO: Cache privateKey
	var priv dsa.PrivateKey
	privJS, err := getUserAttribute("", "private_key")
	if err != nil {
		return nil, err
	}
	err = json.Unmarshal([]byte(privJS), &priv)
	if err != nil {
		return nil, err
	}

	r, s, err := SignatureSign(hashScript(script), &priv)
	if err != nil {
		return nil, err
	}
	ss := ScriptSignature{R: string(r), S: string(s), V: 0, H: fullUrl}
	ssJS, err := json.Marshal(ss)
	if err != nil {
		return nil, err
	}

	parts = [][]byte{}
	parts = append(parts, []byte("<!--"))
	parts = append(parts, ssJS)
	parts = append(parts, []byte("-->\n"))
	parts = append(parts, script)
	return bytes.Join(parts, []byte("")), nil
}
func (c *Component) encode(seps *Delimeters) []byte {
	buf := [][]byte{}
	for _, sc := range c.SubComponents {
		buf = append(buf, sc.Value)
	}
	return bytes.Join(buf, []byte(string(seps.SubComponent)))
}