func (s *SuiteScanner) TestScanAndPayload(c *C) {
	for _, test := range [...]string{
		"a",
		"a\n",
		strings.Repeat("a", 100),
		strings.Repeat("a", 100) + "\n",
		strings.Repeat("\x00", 100),
		strings.Repeat("\x00", 100) + "\n",
		strings.Repeat("a", pktline.MaxPayloadSize),
		strings.Repeat("a", pktline.MaxPayloadSize-1) + "\n",
	} {
		var buf bytes.Buffer
		e := pktline.NewEncoder(&buf)
		err := e.EncodeString(test)
		c.Assert(err, IsNil,
			Commentf("input len=%x, contents=%.10q\n", len(test), test))

		sc := pktline.NewScanner(&buf)
		c.Assert(sc.Scan(), Equals, true,
			Commentf("test = %.20q...", test))

		obtained := sc.Bytes()
		c.Assert(obtained, DeepEquals, []byte(test),
			Commentf("in = %.20q out = %.20q", test, string(obtained)))
	}
}
// returns a byte slice with the pkt-lines for the given payloads.
func pktlines(c *C, payloads ...[]byte) []byte {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)

	err := e.Encode(payloads...)
	c.Assert(err, IsNil,
		Commentf("building pktlines for %v\n", payloads))

	return buf.Bytes()
}
func toPktLines(c *C, payloads []string) io.Reader {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)
	err := e.EncodeString(payloads...)
	c.Assert(err, IsNil)

	return &buf
}
func (s *SuiteScanner) TestEOF(c *C) {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)
	err := e.EncodeString("first", "second")
	c.Assert(err, IsNil)

	sc := pktline.NewScanner(&buf)
	for sc.Scan() {
	}
	c.Assert(sc.Err(), IsNil)
}
func (s *SuiteScanner) TestFlush(c *C) {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)
	err := e.Flush()
	c.Assert(err, IsNil)

	sc := pktline.NewScanner(&buf)
	c.Assert(sc.Scan(), Equals, true)

	payload := sc.Bytes()
	c.Assert(len(payload), Equals, 0)
}
func (s *SuiteDecodeEncode) test(c *C, in []string, exp []string) {
	var err error
	var input io.Reader
	{
		var buf bytes.Buffer
		p := pktline.NewEncoder(&buf)
		err = p.EncodeString(in...)
		c.Assert(err, IsNil)
		input = &buf
	}

	var expected []byte
	{
		var buf bytes.Buffer
		p := pktline.NewEncoder(&buf)
		err = p.EncodeString(exp...)
		c.Assert(err, IsNil)
		expected = buf.Bytes()
	}

	var obtained []byte
	{
		ar := advrefs.New()
		d := advrefs.NewDecoder(input)
		err = d.Decode(ar)
		c.Assert(err, IsNil)

		var buf bytes.Buffer
		e := advrefs.NewEncoder(&buf)
		err := e.Encode(ar)
		c.Assert(err, IsNil)

		obtained = buf.Bytes()
	}

	c.Assert(obtained, DeepEquals, expected,
		Commentf("input = %v\nobtained = %q\nexpected = %q\n",
			in, string(obtained), string(expected)))
}
func testDecodeOK(c *C, payloads []string) *advrefs.AdvRefs {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)
	err := e.EncodeString(payloads...)
	c.Assert(err, IsNil)

	ar := advrefs.New()
	d := advrefs.NewDecoder(&buf)

	err = d.Decode(ar)
	c.Assert(err, IsNil)

	return ar
}
// returns nSections sections, each of them with nLines pkt-lines (not
// counting the flush-pkt):
//
//   0009 0.0\n
//   0009 0.1\n
//   ...
//   0000
//
// and so on.
func sectionsExample(c *C, nSections, nLines int) io.Reader {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)
	for section := 0; section < nSections; section++ {
		ss := []string{}
		for line := 0; line < nLines; line++ {
			line := fmt.Sprintf(" %d.%d\n", section, line)
			ss = append(ss, line)
		}
		err := e.EncodeString(ss...)
		c.Assert(err, IsNil)

		err = e.Flush()
		c.Assert(err, IsNil)
	}

	return &buf
}
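// The following is an illustrative sketch, not part of the original code: it
// shows the raw bytes produced by the same encoding steps sectionsExample
// uses, for a single section of two lines. The 4-byte hex length prefix of
// each pkt-line counts the prefix itself, and every section ends with a
// flush-pkt ("0000"). The function name is hypothetical.
func exampleSectionBytes() {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)

	// " 0.0\n" is a 5-byte payload, so the full pkt-line is 9 bytes: "0009 0.0\n".
	_ = e.EncodeString(" 0.0\n", " 0.1\n")
	_ = e.Flush() // flush-pkt: "0000", no payload

	fmt.Printf("%q\n", buf.String())
	// Prints: "0009 0.0\n0009 0.1\n0000"
}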
func (s *SuiteScanner) TestSkip(c *C) {
	for _, test := range [...]struct {
		input    []string
		n        int
		expected []byte
	}{
		{
			input:    []string{"first", "second", "third"},
			n:        1,
			expected: []byte("second"),
		},
		{
			input:    []string{"first", "second", "third"},
			n:        2,
			expected: []byte("third"),
		},
	} {
		var buf bytes.Buffer
		e := pktline.NewEncoder(&buf)
		err := e.EncodeString(test.input...)
		c.Assert(err, IsNil)

		sc := pktline.NewScanner(&buf)
		for i := 0; i < test.n; i++ {
			c.Assert(sc.Scan(), Equals, true,
				Commentf("scan error = %s", sc.Err()))
		}
		c.Assert(sc.Scan(), Equals, true,
			Commentf("scan error = %s", sc.Err()))

		obtained := sc.Bytes()
		c.Assert(obtained, DeepEquals, test.expected,
			Commentf("\nin = %.20q\nout = %.20q\nexp = %.20q",
				test.input, obtained, test.expected))
	}
}
func (r *GitUploadPackRequest) Reader() *strings.Reader {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)

	for _, want := range r.Wants {
		_ = e.Encodef("want %s\n", want)
	}

	for _, have := range r.Haves {
		_ = e.Encodef("have %s\n", have)
	}

	if r.Depth != 0 {
		_ = e.Encodef("deepen %d\n", r.Depth)
	}

	_ = e.Flush()
	_ = e.EncodeString("done\n")

	return strings.NewReader(buf.String())
}
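// A minimal sketch (not in the original source) of the wire format Reader
// produces: one "want" line, a flush-pkt closing the want/have section, and
// the final "done" line. The hash and the function name are made up for
// illustration.
func exampleUploadPackRequestBytes() {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)

	_ = e.Encodef("want %s\n", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5")
	_ = e.Flush()
	_ = e.EncodeString("done\n")

	fmt.Printf("%q\n", buf.String())
	// Prints: "0032want 6ecf0ef2c2dffb796033e5a02219af86ec6584e5\n00000009done\n"
	// "want <40-hex-hash>\n" is 46 bytes of payload, hence the "0032" prefix;
	// "done\n" is 5 bytes, hence "0009".
}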
func (s *Server) doUploadPackResponse(w http.ResponseWriter, r *http.Request) {
	pkg := s.buildPackage(r)
	fetcher := NewFetcher(pkg, getAuth(r))
	ref, err := s.getVersion(fetcher, pkg)
	if err != nil {
		s.handleError(w, r, err)
		return
	}

	w.Header().Set("Content-Type", "application/x-git-upload-pack-result")

	pkt := pktline.NewEncoder(w)
	if err := pkt.EncodeString("NAK\n"); err != nil {
		s.handleError(w, r, err)
		return
	}

	_, err = fetcher.Fetch(w, ref)
	if err != nil {
		s.handleError(w, r, err)
		return
	}
}
func (i *GitUploadPackInfo) Bytes() []byte {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)

	_ = e.EncodeString("# service=git-upload-pack\n")

	// inserting a flush-pkt here violates the protocol spec, but some
	// servers do it, like Github.com
	e.Flush()

	_ = e.Encodef("%s HEAD\x00%s\n", i.Head().Hash(), i.Capabilities.String())

	for _, ref := range i.Refs {
		if ref.Type() != plumbing.HashReference {
			continue
		}

		_ = e.Encodef("%s %s\n", ref.Hash(), ref.Name())
	}

	e.Flush()

	return buf.Bytes()
}
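// An illustrative sketch (not part of the original code) of the start of the
// advertisement Bytes builds, using only the pktline calls shown above. The
// service announcement "# service=git-upload-pack\n" is 26 bytes of payload,
// so it is framed as "001e...", followed by the flush-pkt discussed in the
// comment above. The function name is hypothetical.
func exampleServiceAnnouncement() {
	var buf bytes.Buffer
	e := pktline.NewEncoder(&buf)

	_ = e.EncodeString("# service=git-upload-pack\n")
	_ = e.Flush()

	fmt.Printf("%q\n", buf.String())
	// Prints: "001e# service=git-upload-pack\n0000"
}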
// NewEncoder returns a new encoder that writes to w.
func NewEncoder(w io.Writer) *Encoder {
	return &Encoder{
		pe: pktline.NewEncoder(w),
	}
}