func (ss *setOpResponses) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}

	var def *spec.Response
	var scr map[int]spec.Response

	for _, line := range lines {
		kv := strings.SplitN(line, ":", 2)
		var key, value string

		if len(kv) > 1 {
			key = strings.TrimSpace(kv[0])
			if key == "" {
				// this must be some weird empty line
				continue
			}

			value = strings.TrimSpace(kv[1])
			if value == "" {
				var resp spec.Response
				if strings.EqualFold("default", key) {
					if def == nil {
						def = &resp
					}
				} else {
					if sc, err := strconv.Atoi(key); err == nil {
						if scr == nil {
							scr = make(map[int]spec.Response)
						}
						scr[sc] = resp
					}
				}
				continue
			}

			// each leading "[]" adds one array level around the schema
			var arrays int
			for strings.HasPrefix(value, "[]") {
				arrays++
				value = value[2:]
			}

			var isDefinitionRef bool
			var ref spec.Ref
			var err error
			if arrays == 0 {
				if strings.HasPrefix(value, "body:") {
					isDefinitionRef = true
					ref, err = spec.NewRef("#/definitions/" + value[5:])
				} else {
					ref, err = spec.NewRef("#/responses/" + value)
				}
			} else {
				isDefinitionRef = true
				ref, err = spec.NewRef("#/definitions/" + value)
			}
			if err != nil {
				return err
			}

			// a bare name that is not a known response but matches a known
			// definition resolves to a definition reference instead
			if _, ok := ss.responses[value]; !ok {
				if _, ok := ss.definitions[value]; ok {
					isDefinitionRef = true
					ref, err = spec.NewRef("#/definitions/" + value)
				}
			}
			if err != nil {
				return err
			}

			var resp spec.Response
			if !isDefinitionRef {
				resp.Ref = ref
			} else {
				resp.Schema = new(spec.Schema)
				if arrays == 0 {
					resp.Schema.Ref = ref
				} else {
					cs := resp.Schema
					for i := 0; i < arrays; i++ {
						cs.Typed("array", "")
						cs.Items = new(spec.SchemaOrArray)
						cs.Items.Schema = new(spec.Schema)
						cs = cs.Items.Schema
					}
					cs.Ref = ref
				}
			}

			if strings.EqualFold("default", key) {
				if def == nil {
					def = &resp
				}
			} else {
				if sc, err := strconv.Atoi(key); err == nil {
					if scr == nil {
						scr = make(map[int]spec.Response)
					}
					scr[sc] = resp
				}
			}
		}
	}
	ss.set(def, scr)
	return nil
}
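// Hedged illustration, not part of the scanner: the kind of "Responses:"
// lines from a swagger:route comment that setOpResponses.Parse consumes.
// A bare name becomes a #/responses/ reference, a "body:" prefix forces a
// #/definitions/ reference, and each leading "[]" wraps the schema in one
// more array level. The route, path and model names below are made up for
// the example.
//
// swagger:route GET /pets listPets
//
// Responses:
//   default: genericError
//   200: []pet
//   422: body:validationError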
func (rp *responseParser) parseStructType(gofile *ast.File, response *spec.Response, tpe *ast.StructType, seenPreviously map[string]struct{}) error {
	if tpe.Fields != nil {
		seenProperties := seenPreviously

		for _, fld := range tpe.Fields.List {
			if len(fld.Names) == 0 {
				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := rp.parseEmbeddedStruct(gofile, response, fld.Type, seenProperties); err != nil {
					return err
				}
			}
		}

		for _, fld := range tpe.Fields.List {
			var nm string
			if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
				nm = fld.Names[0].Name
				if fld.Tag != nil && len(strings.TrimSpace(fld.Tag.Value)) > 0 {
					tv, err := strconv.Unquote(fld.Tag.Value)
					if err != nil {
						return err
					}

					if strings.TrimSpace(tv) != "" {
						st := reflect.StructTag(tv)
						if st.Get("json") != "" {
							nm = strings.Split(st.Get("json"), ",")[0]
						}
					}
				}

				var in string
				// scan for param location first, this changes some behavior down the line
				if fld.Doc != nil {
					for _, cmt := range fld.Doc.List {
						for _, line := range strings.Split(cmt.Text, "\n") {
							matches := rxIn.FindStringSubmatch(line)
							if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
								in = strings.TrimSpace(matches[1])
							}
						}
					}
				}

				ps := response.Headers[nm]
				if err := rp.scp.parseNamedType(gofile, fld.Type, responseTypable{in, &ps, response}); err != nil {
					return err
				}

				sp := new(sectionedParser)
				sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
				sp.taggers = []tagParser{
					newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}),
					newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}),
					newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
					newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}),
					newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
					newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}),
					newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
					newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}),
					newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
					newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}),
				}

				itemsTaggers := func(items *spec.Items, level int) []tagParser {
					// the expression is 1-index based not 0-index
					itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)

					return []tagParser{
						newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
						newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
					}
				}

				// check if this is a primitive, if so parse the validations from the
				// doc comments of the slice declaration.
				if ftped, ok := fld.Type.(*ast.ArrayType); ok {
					ftpe := ftped
					items, level := ps.Items, 0
					for items != nil {
						switch iftpe := ftpe.Elt.(type) {
						case *ast.ArrayType:
							eleTaggers := itemsTaggers(items, level)
							sp.taggers = append(eleTaggers, sp.taggers...)
							ftpe = iftpe
						case *ast.Ident:
							if iftpe.Obj == nil {
								sp.taggers = append(itemsTaggers(items, level), sp.taggers...)
							}
							break
						default:
							return fmt.Errorf("unknown field type ele for %q", nm)
						}
						items = items.Items
						level = level + 1
					}
				}

				if err := sp.Parse(fld.Doc); err != nil {
					return err
				}

				if in != "body" {
					seenProperties[nm] = struct{}{}
					if response.Headers == nil {
						response.Headers = make(map[string]spec.Header)
					}
					response.Headers[nm] = ps
				}
			}
		}

		for k := range response.Headers {
			if _, ok := seenProperties[k]; !ok {
				delete(response.Headers, k)
			}
		}
	}

	return nil
}
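// Hedged illustration of the input parseStructType expects, assuming the
// usual go-swagger response annotation style: exported fields become
// response headers named after their json tag, validation tags in the
// field doc comments (Maximum, Pattern, MinItems, ...) are picked up by
// the sectionedParser configured above, and a field documented with
// "in: body" is kept out of the headers map. Type and field names here
// are made up for the example.
//
// swagger:response petsResponse
type petsResponse struct {
	// Remaining requests in the current rate-limit window.
	//
	// Minimum: 0
	RateLimitRemaining int64 `json:"X-Rate-Limit-Remaining"`

	// Identifier assigned to this request.
	//
	// Pattern: ^[a-f0-9]{32}$
	RequestID string `json:"X-Request-Id"`

	// The pets that matched the query.
	//
	// in: body
	Body []Pet `json:"body"`
}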
func (g *swaggerGenerator) generateSwaggerOperation(test IApiTest, defs spec.Definitions) (spec.Operation, error) {
	op := spec.Operation{}
	op.Responses = &spec.Responses{}
	op.Responses.StatusCodeResponses = map[int]spec.Response{}

	var description string
	processedQueryParams := map[string]interface{}{}
	processedPathParams := map[string]interface{}{}
	processedHeaderParams := map[string]interface{}{}
	for _, testCase := range test.TestCases() {
		// parameter definitions are collected from 2xx tests only
		if testCase.ExpectedHttpCode >= 200 && testCase.ExpectedHttpCode < 300 {
			description = testCase.Description

			for key, param := range testCase.Headers {
				if _, ok := processedHeaderParams[key]; ok {
					continue
				}
				specParam, err := generateSwaggerSpecParam(key, param, "header")
				if err != nil {
					return op, err
				}
				processedHeaderParams[key] = nil
				op.Parameters = append(op.Parameters, specParam)
			}

			for key, param := range testCase.PathParams {
				if _, ok := processedPathParams[key]; ok {
					continue
				}
				param.Required = true // path parameters are always required
				specParam, err := generateSwaggerSpecParam(key, param, "path")
				if err != nil {
					return op, err
				}
				processedPathParams[key] = nil
				op.Parameters = append(op.Parameters, specParam)
			}

			for key, param := range testCase.QueryParams {
				if _, ok := processedQueryParams[key]; ok {
					continue
				}
				specParam, err := generateSwaggerSpecParam(key, param, "query")
				if err != nil {
					return op, err
				}
				processedQueryParams[key] = nil
				op.Parameters = append(op.Parameters, specParam)
			}

			if testCase.RequestBody != nil {
				specParam := spec.Parameter{}
				specParam.Name = "body"
				specParam.In = "body"
				specParam.Required = true

				// TODO: right now it supports json, but should support marshaller depending on MIME type
				if content, err := json.MarshalIndent(testCase.RequestBody, "", " "); err == nil {
					specParam.Description = string(content)
				}

				specParam.Schema = generateSpecSchema(testCase.RequestBody, defs)
				op.Parameters = append(op.Parameters, specParam)
			}
		}

		response := spec.Response{}
		response.Description = testCase.Description
		if testCase.ExpectedData != nil {
			response.Schema = generateSpecSchema(testCase.ExpectedData, defs)
			response.Examples = map[string]interface{}{
				"application/json": testCase.ExpectedData,
			}
		}

		op.Responses.StatusCodeResponses[testCase.ExpectedHttpCode] = response
	}
	op.Summary = description

	if taggable, ok := test.(ITaggable); ok {
		op.Tags = []string{taggable.Tag()}
	}

	return op, nil
}
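// Hedged usage sketch: attaching the generated operation to a swagger
// document. The generator does not know the path or HTTP method, so the
// caller is assumed to supply them; addOperation, swaggerDoc and the GET
// wiring below are illustrative and not part of the generator above.
func (g *swaggerGenerator) addOperation(swaggerDoc *spec.Swagger, test IApiTest, path string) error {
	op, err := g.generateSwaggerOperation(test, swaggerDoc.Definitions)
	if err != nil {
		return err
	}

	if swaggerDoc.Paths == nil {
		swaggerDoc.Paths = &spec.Paths{Paths: map[string]spec.PathItem{}}
	}
	pathItem := swaggerDoc.Paths.Paths[path]
	pathItem.Get = &op // assuming a GET endpoint for this sketch
	swaggerDoc.Paths.Paths[path] = pathItem
	return nil
}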