func (g *Generator) generateOperation(b *descriptor.Binding) *spec.Operation {
	response := new(spec.Response)
	responses := new(spec.Responses)
	op := new(spec.Operation)
	op.Tags = make([]string, 0)
	op.Parameters = make([]spec.Parameter, 0)
	op.ID = strings.ToLower(b.Method.GetName())
	// op.Consumes = []string{"application/x-www-form-urlencoded", "application/json", "application/x-protobuf"}
	// op.Produces = []string{"application/json", "application/x-protobuf"}

	if cmt := g.file.GetCommentText(b.Method.CommentPath); cmt != nil {
		// TODO(ceram1): Cut by newline to extract summary, like golang.
		op.Summary = *cmt
		_ = strings.Index(*cmt, "\n\n") // idx to cut comment.
		op.Description = *cmt
	}

	for _, param := range b.PathParams {
		p := g.generatePathParameter(&param)
		op.Parameters = append(op.Parameters, *p)
	}

	response.Ref = messageRef(b.Method.ResponseType)
	responses.Default = response
	op.Responses = responses
	return op
}
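
// A minimal sketch, not part of the generator: one way the TODO above could be
// resolved, cutting the comment at the first blank line so the first paragraph
// becomes the summary while the full text stays in the description. The helper
// name splitComment is hypothetical.
func splitComment(cmt string) (summary, description string) {
	description = strings.TrimSpace(cmt)
	if idx := strings.Index(cmt, "\n\n"); idx >= 0 {
		return strings.TrimSpace(cmt[:idx]), description
	}
	return description, description
}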

func (pp *paramStructParser) parseStructType(gofile *ast.File, operation *spec.Operation, tpe *ast.StructType, seenPreviously map[string]spec.Parameter) error {
	if tpe.Fields != nil {
		pt := seenPreviously

		for _, fld := range tpe.Fields.List {
			if len(fld.Names) == 0 {
				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := pp.parseEmbeddedStruct(gofile, operation, fld.Type, pt); err != nil {
					return err
				}
			}
		}

		for _, fld := range tpe.Fields.List {
			var nm, gnm string
			if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
				nm = fld.Names[0].Name
				gnm = nm
				if fld.Tag != nil && len(strings.TrimSpace(fld.Tag.Value)) > 0 {
					tv, err := strconv.Unquote(fld.Tag.Value)
					if err != nil {
						return err
					}

					if strings.TrimSpace(tv) != "" {
						st := reflect.StructTag(tv)
						jsonTag := st.Get("json")
						if jsonTag != "" && jsonTag != "-" {
							nm = strings.Split(jsonTag, ",")[0]
						}
					}
				}

				in := "query"
				// scan for param location first, this changes some behavior down the line
				if fld.Doc != nil {
					for _, cmt := range fld.Doc.List {
						for _, line := range strings.Split(cmt.Text, "\n") {
							matches := rxIn.FindStringSubmatch(line)
							if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
								in = strings.TrimSpace(matches[1])
							}
						}
					}
				}

				ps := pt[nm]
				ps.In = in
				var pty swaggerTypable = paramTypable{&ps}
				if in == "body" {
					pty = schemaTypable{pty.Schema(), 0}
				}
				if err := parseProperty(pp.scp, gofile, fld.Type, pty); err != nil {
					return err
				}

				sp := new(sectionedParser)
				sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
				if ps.Ref.GetURL() == nil {
					sp.taggers = []tagParser{
						newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}),
						newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}),
						newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
						newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}),
						newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
						newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}),
						newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
						newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}),
						newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
						newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}),
						newSingleLineTagParser("required", &setRequiredParam{&ps}),
						newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
					}

					itemsTaggers := func(items *spec.Items, level int) []tagParser {
						// the expression is 1-index based, not 0-index
						itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
						return []tagParser{
							newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
						}
					}

					// check if this is a primitive, if so parse the validations from the
					// doc comments of the slice declaration.
					if ftped, ok := fld.Type.(*ast.ArrayType); ok {
						ftpe := ftped
						items, level := ps.Items, 0
						for items != nil {
							switch iftpe := ftpe.Elt.(type) {
							case *ast.ArrayType:
								eleTaggers := itemsTaggers(items, level)
								sp.taggers = append(eleTaggers, sp.taggers...)
								ftpe = iftpe
							case *ast.Ident:
								if iftpe.Obj == nil {
									sp.taggers = append(itemsTaggers(items, level), sp.taggers...)
								}
							default:
								return fmt.Errorf("unknown field type ele for %q", nm)
							}
							items = items.Items
							level = level + 1
						}
					}
				} else {
					sp.taggers = []tagParser{
						newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}),
						newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
					}
				}
				if err := sp.Parse(fld.Doc); err != nil {
					return err
				}

				if ps.Name == "" {
					ps.Name = nm
				}

				if nm != gnm {
					ps.AddExtension("x-go-name", gnm)
				}
				pt[nm] = ps
			}
		}

		for k, p := range pt {
			for i, v := range operation.Parameters {
				if v.Name == k {
					operation.Parameters = append(operation.Parameters[:i], operation.Parameters[i+1:]...)
					break
				}
			}
			operation.Parameters = append(operation.Parameters, p)
		}
	}
	return nil
}
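
// Illustrative input only, not part of the parser: a parameter struct of the
// shape parseStructType handles. The field doc comments carry the "in:",
// "required:" and validation tags matched by the taggers above, and the json
// struct tag renames the parameter. The swagger:parameters annotation and the
// operation id "listItems" are assumptions made for this example.
//
// swagger:parameters listItems
type ListItemsParams struct {
	// The maximum number of items to return per page.
	//
	// in: query
	// required: true
	// minimum: 1
	// maximum: 100
	PageSize int64 `json:"page_size"`

	// Tags to filter the items by.
	//
	// in: query
	// collectionFormat: csv
	// unique: true
	Tags []string `json:"tags"`
}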

func buildOperation(route *revel.Route) *spec.Operation {
	var (
		typeInfo   *harness.TypeInfo
		methodSpec *harness.MethodSpec
	)

	info, rerr := harness.ProcessSource(revel.CodePaths)
	if rerr != nil {
		panic(rerr) // TODO EMPTY PANIC
	}

	// get the TypeInfo and MethodSpec for this route
	for _, cinfo := range info.ControllerSpecs() {
		typeInfo = cinfo // TODO move inside if (get around compiler complaint)
		if route.ControllerName == typeInfo.StructName {
			for _, spec := range cinfo.MethodSpecs {
				if route.MethodName == spec.Name {
					methodSpec = spec
					break
				}
			}
			break
		}
	}

	op := new(spec.Operation)
	// TODO op.Description
	// this will probably require either editing harness.ProcessSource to also grab comments OR
	// to copy that functionality and modify it
	op.Consumes = ContentTypes
	op.Produces = ContentTypes
	op.AddExtension("x-revel-action", route.Action)

	for i, arg := range methodSpec.Args {
		// skip over fixed parameters that match up to the arguments
		if i < len(route.FixedParams) {
			continue
		}
		var param spec.Parameter
		param.Name = arg.Name
		param.Type = arg.TypeExpr.Expr // TODO review
		// TODO: better path vs query vs body vs multipart
		count := strings.Count(route.Path, ":") + strings.Count(route.Path, "*")
		if i < count {
			param.In = "path"
		} else {
			param.In = "body"
		}
		op.Parameters = append(op.Parameters, param)
	}

	// TODO RenderCalls
	// fmt.Printf("route: %#v\n", route)
	// fmt.Printf("typeInfo: %#v\n", typeInfo)
	// fmt.Printf("methodSpec: %#v\n", methodSpec)
	// for _, call := range methodSpec.RenderCalls {
	// 	fmt.Printf("\tcall: %#v\n", call)
	// }

	/*
		"responses": {
			"200": {
				"description": "A list of pets.",
				"schema": {
					"type": "array",
					"items": {
						"$ref": "#/definitions/Pet"
					}
				}
			}
		}
	*/

	return op
}
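
// A rough usage sketch, assuming the go-openapi spec package used above: one
// way the operations built by buildOperation might be assembled into a Paths
// object keyed by route path, with the HTTP verb choosing the PathItem slot.
// buildSwaggerPaths is a hypothetical helper, not part of the generator.
func buildSwaggerPaths(routes []*revel.Route) spec.Paths {
	paths := spec.Paths{Paths: map[string]spec.PathItem{}}
	for _, route := range routes {
		op := buildOperation(route)
		item := paths.Paths[route.Path]
		switch strings.ToUpper(route.Method) {
		case "GET":
			item.Get = op
		case "POST":
			item.Post = op
		case "PUT":
			item.Put = op
		case "DELETE":
			item.Delete = op
		}
		paths.Paths[route.Path] = item
	}
	return paths
}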