func (g *Generator) generateProperty(f *descriptor.Field) *spec.Schema {
	sc := new(spec.Schema)
	sc.ExtraProps = make(map[string]interface{})

	if cmt := g.file.GetCommentText(f.CommentPath); cmt != nil {
		sc.Description = *cmt
		// glog.Fatalf("Comment: %s, %s", f.CommentPath, *cmt)
	} else {
		// glog.Fatalf("Comment: %s, %s", f.CommentPath, f.GetName())
	}

	// Handle $ref
	setSchemaType(sc, f)

	// default value if exists
	if f.DefaultValue != nil {
		sc.Default = f.GetDefaultValue()
	}

	// export protobuf id
	// sc.ExtraProps["protobufId"] = f.GetNumber()
	//
	// if f.Options != nil {
	// 	sc.ExtraProps["ext_options"] = f.GetOptions()
	// }
	//
	// sc.ExtraProps["ext_extendee"] = f.GetExtendee()
	// sc.ExtraProps["ext_oneOfIndex"] = f.GetOneofIndex()
	return sc
}
func TestTypeResolver_AnonymousStructs(t *testing.T) {
	_, resolver, err := basicTaskListResolver(t)
	if assert.NoError(t, err) {
		// anonymous structs should be accounted for
		parent := new(spec.Schema)
		parent.Typed("object", "")
		parent.Properties = make(map[string]spec.Schema)
		parent.Properties["name"] = *spec.StringProperty()
		parent.Properties["age"] = *spec.Int32Property()

		rt, err := resolver.ResolveSchema(parent, true)
		if assert.NoError(t, err) {
			assert.False(t, rt.IsNullable)
			assert.True(t, rt.IsAnonymous)
			assert.True(t, rt.IsComplexObject)
		}

		parent.Extensions = make(spec.Extensions)
		parent.Extensions["x-isnullable"] = true
		rt, err = resolver.ResolveSchema(parent, true)
		if assert.NoError(t, err) {
			assert.True(t, rt.IsNullable)
			assert.True(t, rt.IsAnonymous)
			assert.True(t, rt.IsComplexObject)
		}
	}
}
func setSchemaType(s *spec.Schema, f *descriptor.Field) {
	if f.GetLabel() == godesc.FieldDescriptorProto_LABEL_REPEATED {
		s.Type = spec.StringOrArray([]string{"array"})
		items := new(spec.Schema)
		if f.GetType() == godesc.FieldDescriptorProto_TYPE_MESSAGE {
			items.Ref = messageRef(f.Message)
		} else {
			ty, format := toSwaggerType(f.GetType())
			items.Type = spec.StringOrArray([]string{ty})
			items.Format = format
		}
		s.Items = &spec.SchemaOrArray{Schema: items}
		return
	}

	if f.GetType() == godesc.FieldDescriptorProto_TYPE_MESSAGE {
		s.Ref = messageRef(f.Message)
		return
	}

	ty, format := toSwaggerType(f.GetType())
	s.Type = spec.StringOrArray([]string{ty})
	s.Format = format
}
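// The helpers toSwaggerType and messageRef used above are not shown here.
// The following is a minimal sketch of what toSwaggerType is assumed to do:
// map a protobuf field type onto a Swagger (type, format) pair. The exact
// mapping used by the generator may differ; this is an illustration only,
// and the function name is chosen to avoid clashing with the real helper.
func toSwaggerTypeSketch(t godesc.FieldDescriptorProto_Type) (string, string) {
	switch t {
	case godesc.FieldDescriptorProto_TYPE_DOUBLE:
		return "number", "double"
	case godesc.FieldDescriptorProto_TYPE_FLOAT:
		return "number", "float"
	case godesc.FieldDescriptorProto_TYPE_INT64, godesc.FieldDescriptorProto_TYPE_SINT64:
		return "integer", "int64"
	case godesc.FieldDescriptorProto_TYPE_INT32, godesc.FieldDescriptorProto_TYPE_SINT32:
		return "integer", "int32"
	case godesc.FieldDescriptorProto_TYPE_BOOL:
		return "boolean", ""
	case godesc.FieldDescriptorProto_TYPE_BYTES:
		return "string", "byte"
	default:
		// strings, enums and anything unrecognized fall back to a plain string
		return "string", ""
	}
}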
func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) {
	if in == "body" {
		// get the schema for items on the schema property
		if schema == nil {
			schema = new(spec.Schema)
		}

		if schema.Items == nil {
			schema.Items = new(spec.SchemaOrArray)
		}
		if schema.Items.Schema == nil {
			schema.Items.Schema = new(spec.Schema)
		}

		schema.Typed("array", "")
		return schemaTypable{schema.Items.Schema, 0}, schema
	}
	return nil, nil
}
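// Illustrative use only (not part of the scanner's API surface): for a
// "body" parameter, bodyTypable turns the supplied schema into an array and
// hands back a typable that wraps its items schema, so whatever type is
// parsed next lands on the element schema rather than on the array itself.
// This assumes the swaggerTypable interface exposes Typed(type, format).
func exampleBodyTypable() {
	var sch spec.Schema
	typable, arr := bodyTypable("body", &sch)
	if typable != nil {
		// the element type ends up on arr.Items.Schema
		typable.Typed("string", "")
	}
	_ = arr // arr.Type is now ["array"], arr.Items.Schema.Type is ["string"]
}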
func TestTypeResolver_AdditionalItems(t *testing.T) {
	_, resolver, err := basicTaskListResolver(t)
	tpe := spec.StringProperty()
	if assert.NoError(t, err) {
		// arrays of primitives and string formats with additional formats
		for _, val := range schTypeVals {
			var sch spec.Schema
			sch.Typed(val.Type, val.Format)

			var coll spec.Schema
			coll.Type = []string{"array"}
			coll.Items = new(spec.SchemaOrArray)
			coll.Items.Schema = tpe
			coll.AdditionalItems = new(spec.SchemaOrBool)
			coll.AdditionalItems.Schema = &sch

			rt, err := resolver.ResolveSchema(&coll, true)
			if assert.NoError(t, err) && assert.True(t, rt.IsArray) {
				assert.True(t, rt.HasAdditionalItems)
				assert.False(t, rt.IsNullable)
				//if assert.NotNil(t, rt.ElementType) {
				//assertPrimitiveResolve(t, "string", "", "string", *rt.ElementType)
				//}
			}
		}
	}
}
func (scp *schemaParser) parseAllOfMember(gofile *ast.File, schema *spec.Schema, expr ast.Expr, seenPreviously map[string]struct{}) error {
	// TODO: check if struct is annotated with swagger:model or known in the definitions otherwise
	var pkg *loader.PackageInfo
	var file *ast.File
	var gd *ast.GenDecl
	var ts *ast.TypeSpec
	var err error

	switch tpe := expr.(type) {
	case *ast.Ident:
		// do lookup of type
		// take primitives into account, they should result in an error for swagger
		pkg, err = scp.packageForFile(gofile)
		if err != nil {
			return err
		}
		file, gd, ts, err = findSourceFile(pkg, tpe.Name)
		if err != nil {
			return err
		}

	case *ast.SelectorExpr:
		// look up package, file and then type
		pkg, err = scp.packageForSelector(gofile, tpe.X)
		if err != nil {
			return fmt.Errorf("embedded struct: %v", err)
		}
		file, gd, ts, err = findSourceFile(pkg, tpe.Sel.Name)
		if err != nil {
			return fmt.Errorf("embedded struct: %v", err)
		}

	default:
		return fmt.Errorf("unable to resolve allOf member for: %v", expr)
	}

	sd := newSchemaDecl(file, gd, ts)
	if sd.hasAnnotation() {
		ref, err := spec.NewRef("#/definitions/" + sd.Name)
		if err != nil {
			return err
		}
		schema.Ref = ref
		scp.postDecls = append(scp.postDecls, *sd)
	} else {
		switch st := ts.Type.(type) {
		case *ast.StructType:
			return scp.parseStructType(file, schema, st, seenPreviously)
		case *ast.InterfaceType:
			return scp.parseInterfaceType(file, schema, st, seenPreviously)
		}
	}
	return nil
}
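// SharedBase is a stand-in embedded type for the illustration below.
type SharedBase struct {
	ID int64 `json:"id"`
}

// For illustration only: a hypothetical model whose embedded member carries
// the swagger:allOf annotation that parseAllOfMember resolves. The type and
// field names are made up; only the annotation shape matters. An optional
// value after swagger:allOf is picked up by rxAllOf and surfaced as the
// x-class extension (see parseStructType / parseInterfaceType).
//
// swagger:model namedWithAllOfExample
type namedWithAllOfExample struct {
	// swagger:allOf
	SharedBase

	// Extra stays a plain property on the outer schema.
	Extra string `json:"extra"`
}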
func (sg *schemaGenContext) buildItems() error {
	presentsAsSingle := sg.Schema.Items != nil && sg.Schema.Items.Schema != nil
	if presentsAsSingle && sg.Schema.AdditionalItems != nil {
		// unsure if this is a valid or an invalid schema
		return fmt.Errorf("single schema (%s) can't have additional items", sg.Name)
	}
	if presentsAsSingle {
		return sg.buildArray()
	}
	if sg.Schema.Items == nil {
		return nil
	}

	// This is a tuple, build a new model that represents this
	if sg.Named {
		sg.GenSchema.Name = sg.Name
		sg.GenSchema.GoType = sg.TypeResolver.goTypeName(sg.Name) // swag.ToGoName(sg.Name)
		//if sg.TypeResolver.ModelsPackage != "" {
		//sg.GenSchema.GoType = sg.TypeResolver.ModelsPackage + "." + sg.GenSchema.GoType
		//}
		for i, s := range sg.Schema.Items.Schemas {
			elProp := sg.NewTupleElement(&s, i)
			if err := elProp.makeGenSchema(); err != nil {
				return err
			}
			sg.MergeResult(elProp, false)
			elProp.GenSchema.Name = "p" + strconv.Itoa(i)
			sg.GenSchema.Properties = append(sg.GenSchema.Properties, elProp.GenSchema)
		}
		return nil
	}

	// for an anonymous object, first build the new object
	// and then replace the current one with a $ref to the
	// new tuple object
	var sch spec.Schema
	sch.Typed("object", "")
	sch.Properties = make(map[string]spec.Schema)
	for i, v := range sg.Schema.Items.Schemas {
		sch.Required = append(sch.Required, "P"+strconv.Itoa(i))
		sch.Properties["P"+strconv.Itoa(i)] = v
	}
	sch.AdditionalItems = sg.Schema.AdditionalItems
	tup := sg.makeNewStruct(sg.GenSchema.Name+"Tuple"+strconv.Itoa(sg.Index), sch)
	tup.IsTuple = true
	if err := tup.makeGenSchema(); err != nil {
		return err
	}
	tup.GenSchema.IsTuple = true
	tup.GenSchema.IsComplexObject = false
	tup.GenSchema.Title = tup.GenSchema.Name + " a representation of an anonymous Tuple type"
	tup.GenSchema.Description = ""
	sg.ExtraSchemas[tup.Name] = tup.GenSchema

	sg.Schema = *spec.RefProperty("#/definitions/" + tup.Name)
	if err := sg.makeGenSchema(); err != nil {
		return err
	}
	sg.MergeResult(tup, false)
	return nil
}
func TestTypeResolver_TupleTypes(t *testing.T) {
	_, resolver, err := basicTaskListResolver(t)
	if assert.NoError(t, err) {
		// tuple type (items with multiple schemas)
		parent := new(spec.Schema)
		parent.Typed("array", "")
		parent.Items = new(spec.SchemaOrArray)
		parent.Items.Schemas = append(
			parent.Items.Schemas,
			*spec.StringProperty(),
			*spec.Int64Property(),
			*spec.Float64Property(),
			*spec.BoolProperty(),
			*spec.ArrayProperty(spec.StringProperty()),
			*spec.RefProperty("#/definitions/Comment"),
		)

		rt, err := resolver.ResolveSchema(parent, true)
		if assert.NoError(t, err) {
			assert.False(t, rt.IsArray)
			assert.True(t, rt.IsTuple)
		}
	}
}
func (g *Generator) generateDefinition(msg *descriptor.Message) *spec.Schema {
	s := new(spec.Schema)
	s.Title = msg.GetModelName()
	s.Properties = make(map[string]spec.Schema)
	s.Required = make([]string, 0)
	s.ExtraProps = make(map[string]interface{})

	// handle comments.
	if cmt := g.file.GetCommentText(msg.CommentPath); cmt != nil {
		s.Description = *cmt
	}

	// iterate over fields.
	for _, field := range msg.Fields {
		prop := g.generateProperty(field)
		if field.GetLabel() == godesc.FieldDescriptorProto_LABEL_REQUIRED {
			s.Required = append(s.Required, field.GetName())
		}
		s.Properties[field.GetName()] = *prop
	}

	// if msg.EnumType != nil {
	// 	s.ExtraProps["ext_enumType"] = msg.GetEnumType()
	// }
	// if msg.Extension != nil {
	// 	s.ExtraProps["ext_extension"] = msg.GetExtension()
	// }
	// if msg.ExtensionRange != nil {
	// 	s.ExtraProps["ext_extensionRange"] = msg.GetExtensionRange()
	// }
	// if msg.NestedType != nil {
	// 	s.ExtraProps["ext_nestedType"] = msg.GetNestedType()
	// }
	// if msg.OneofDecl != nil {
	// 	s.ExtraProps["ext_oneofDecl"] = msg.GetOneofDecl()
	// }
	// if msg.Options != nil {
	// 	s.ExtraProps["ext_options"] = msg.GetOptions()
	// }
	return s
}
func TestTypeResolver_BasicTypes(t *testing.T) {
	_, resolver, err := basicTaskListResolver(t)
	if assert.NoError(t, err) {
		// primitives and string formats
		for _, val := range schTypeVals {
			sch := new(spec.Schema)
			sch.Typed(val.Type, val.Format)

			rt, err := resolver.ResolveSchema(sch, true)
			if assert.NoError(t, err) {
				assert.False(t, rt.IsNullable)
				assertPrimitiveResolve(t, val.Type, val.Format, val.Expected, rt)
			}
		}

		// arrays of primitives and string formats
		for _, val := range schTypeVals {
			var sch spec.Schema
			sch.Typed(val.Type, val.Format)

			rt, err := resolver.ResolveSchema(new(spec.Schema).CollectionOf(sch), true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsArray)
			}
		}

		// primitives and string formats
		for _, val := range schTypeVals {
			sch := new(spec.Schema)
			sch.Typed(val.Type, val.Format)
			sch.Extensions = make(spec.Extensions)
			sch.Extensions["x-isnullable"] = true

			rt, err := resolver.ResolveSchema(sch, true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsNullable, "expected %q (%q) to be nullable", val.Type, val.Format)
				assertPrimitiveResolve(t, val.Type, val.Format, val.Expected, rt)
			}
		}

		// arrays of primitives and string formats
		for _, val := range schTypeVals {
			var sch spec.Schema
			sch.Typed(val.Type, val.Format)
			sch.AddExtension("x-isnullable", true)

			rt, err := resolver.ResolveSchema(new(spec.Schema).CollectionOf(sch), true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsArray)
			}
		}
	}
}
func TestTypeResolver_ObjectType(t *testing.T) {
	_, resolver, err := basicTaskListResolver(t)
	resolver.ModelName = "TheModel"
	defer func() { resolver.ModelName = "" }()

	if assert.NoError(t, err) {
		// very poor schema definitions (as in none)
		types := []string{"object", ""}
		for _, tpe := range types {
			sch := new(spec.Schema)
			sch.Typed(tpe, "")
			rt, err := resolver.ResolveSchema(sch, true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsMap)
				assert.False(t, rt.IsComplexObject)
				assert.Equal(t, "map[string]interface{}", rt.GoType)
				assert.Equal(t, "object", rt.SwaggerType)
			}

			sch.Properties = make(map[string]spec.Schema)
			var ss spec.Schema
			sch.Properties["tags"] = *(&ss).CollectionOf(*spec.StringProperty())
			rt, err = resolver.ResolveSchema(sch, false)
			assert.True(t, rt.IsComplexObject)
			assert.False(t, rt.IsMap)
			assert.Equal(t, "models.TheModel", rt.GoType)
			assert.Equal(t, "object", rt.SwaggerType)

			sch.Properties = nil
			nsch := new(spec.Schema)
			nsch.Typed(tpe, "")
			nsch.AllOf = []spec.Schema{*sch}
			rt, err = resolver.ResolveSchema(nsch, false)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsComplexObject)
				assert.False(t, rt.IsMap)
				assert.Equal(t, "models.TheModel", rt.GoType)
				assert.Equal(t, "object", rt.SwaggerType)
			}
		}

		sch := new(spec.Schema)
		rt, err := resolver.ResolveSchema(sch, true)
		if assert.NoError(t, err) {
			assert.True(t, rt.IsMap)
			assert.False(t, rt.IsComplexObject)
			assert.Equal(t, "map[string]interface{}", rt.GoType)
			assert.Equal(t, "object", rt.SwaggerType)
		}

		sch = new(spec.Schema)
		var sp spec.Schema
		sp.Typed("object", "")
		sch.AllOf = []spec.Schema{sp}
		rt, err = resolver.ResolveSchema(sch, true)
		if assert.NoError(t, err) {
			assert.True(t, rt.IsComplexObject)
			assert.False(t, rt.IsMap)
			assert.Equal(t, "models.TheModel", rt.GoType)
			assert.Equal(t, "object", rt.SwaggerType)
		}
	}
}
func TestTypeResolver_AdditionalProperties(t *testing.T) {
	_, resolver, err := basicTaskListResolver(t)
	if assert.NoError(t, err) {
		// primitives as additional properties
		for _, val := range schTypeVals {
			sch := new(spec.Schema)
			sch.Typed(val.Type, val.Format)
			parent := new(spec.Schema)
			parent.AdditionalProperties = new(spec.SchemaOrBool)
			parent.AdditionalProperties.Schema = sch

			rt, err := resolver.ResolveSchema(parent, true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsMap)
				assert.False(t, rt.IsComplexObject)
				assert.Equal(t, "map[string]"+val.Expected, rt.GoType)
				assert.Equal(t, "object", rt.SwaggerType)
			}
		}

		// array of primitives as additional properties
		for _, val := range schTypeVals {
			sch := new(spec.Schema)
			sch.Typed(val.Type, val.Format)
			parent := new(spec.Schema)
			parent.AdditionalProperties = new(spec.SchemaOrBool)
			parent.AdditionalProperties.Schema = new(spec.Schema).CollectionOf(*sch)

			rt, err := resolver.ResolveSchema(parent, true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsMap)
				assert.False(t, rt.IsComplexObject)
				assert.Equal(t, "map[string][]"+val.Expected, rt.GoType)
				assert.Equal(t, "object", rt.SwaggerType)
			}
		}

		// refs as additional properties
		for _, val := range schRefVals {
			sch := new(spec.Schema)
			sch.Ref, _ = spec.NewRef("#/definitions/" + val.Type)
			parent := new(spec.Schema)
			parent.AdditionalProperties = new(spec.SchemaOrBool)
			parent.AdditionalProperties.Schema = sch

			rt, err := resolver.ResolveSchema(parent, true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsMap)
				assert.False(t, rt.IsComplexObject)
				assert.Equal(t, "map[string]"+val.Expected, rt.GoType)
				assert.Equal(t, "object", rt.SwaggerType)
			}
		}

		// when additional properties and properties present, it's a complex object

		// primitives as additional properties
		for _, val := range schTypeVals {
			sch := new(spec.Schema)
			sch.Typed(val.Type, val.Format)
			parent := new(spec.Schema)
			parent.Properties = make(map[string]spec.Schema)
			parent.Properties["id"] = *spec.Int32Property()
			parent.AdditionalProperties = new(spec.SchemaOrBool)
			parent.AdditionalProperties.Schema = sch

			rt, err := resolver.ResolveSchema(parent, true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsComplexObject)
				assert.False(t, rt.IsMap)
				assert.Equal(t, "map[string]"+val.Expected, rt.GoType)
				assert.Equal(t, "object", rt.SwaggerType)
			}
		}

		// array of primitives as additional properties
		for _, val := range schTypeVals {
			sch := new(spec.Schema)
			sch.Typed(val.Type, val.Format)
			parent := new(spec.Schema)
			parent.Properties = make(map[string]spec.Schema)
			parent.Properties["id"] = *spec.Int32Property()
			parent.AdditionalProperties = new(spec.SchemaOrBool)
			parent.AdditionalProperties.Schema = new(spec.Schema).CollectionOf(*sch)

			rt, err := resolver.ResolveSchema(parent, true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsComplexObject)
				assert.False(t, rt.IsMap)
				assert.Equal(t, "map[string][]"+val.Expected, rt.GoType)
				assert.Equal(t, "object", rt.SwaggerType)
			}
		}

		// refs as additional properties
		for _, val := range schRefVals {
			sch := new(spec.Schema)
			sch.Ref, _ = spec.NewRef("#/definitions/" + val.Type)
			parent := new(spec.Schema)
			parent.Properties = make(map[string]spec.Schema)
			parent.Properties["id"] = *spec.Int32Property()
			parent.AdditionalProperties = new(spec.SchemaOrBool)
			parent.AdditionalProperties.Schema = sch

			rt, err := resolver.ResolveSchema(parent, true)
			if assert.NoError(t, err) {
				assert.True(t, rt.IsComplexObject)
				assert.False(t, rt.IsMap)
				assert.Equal(t, "map[string]"+val.Expected, rt.GoType)
				assert.Equal(t, "object", rt.SwaggerType)
			}
		}
	}
}
func (scp *schemaParser) createParser(nm string, schema, ps *spec.Schema, fld *ast.Field) *sectionedParser {
	sp := new(sectionedParser)
	sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
	if ps.Ref.String() == "" {
		sp.taggers = []tagParser{
			newSingleLineTagParser("maximum", &setMaximum{schemaValidations{ps}, rxf(rxMaximumFmt, "")}),
			newSingleLineTagParser("minimum", &setMinimum{schemaValidations{ps}, rxf(rxMinimumFmt, "")}),
			newSingleLineTagParser("multipleOf", &setMultipleOf{schemaValidations{ps}, rxf(rxMultipleOfFmt, "")}),
			newSingleLineTagParser("minLength", &setMinLength{schemaValidations{ps}, rxf(rxMinLengthFmt, "")}),
			newSingleLineTagParser("maxLength", &setMaxLength{schemaValidations{ps}, rxf(rxMaxLengthFmt, "")}),
			newSingleLineTagParser("pattern", &setPattern{schemaValidations{ps}, rxf(rxPatternFmt, "")}),
			newSingleLineTagParser("minItems", &setMinItems{schemaValidations{ps}, rxf(rxMinItemsFmt, "")}),
			newSingleLineTagParser("maxItems", &setMaxItems{schemaValidations{ps}, rxf(rxMaxItemsFmt, "")}),
			newSingleLineTagParser("unique", &setUnique{schemaValidations{ps}, rxf(rxUniqueFmt, "")}),
			newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
			newSingleLineTagParser("readOnly", &setReadOnlySchema{ps}),
			newSingleLineTagParser("discriminator", &setDiscriminator{schema, nm}),
		}

		itemsTaggers := func(items *spec.Schema, level int) []tagParser {
			// the expression is 1-index based not 0-index
			itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
			return []tagParser{
				newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{schemaValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{schemaValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{schemaValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{schemaValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{schemaValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{schemaValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{schemaValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{schemaValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
				newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{schemaValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
			}
		}

		// check if this is a primitive, if so parse the validations from the
		// doc comments of the slice declaration.
		if ftped, ok := fld.Type.(*ast.ArrayType); ok {
			ftpe := ftped
			items, level := ps.Items, 0
			for items != nil && items.Schema != nil {
				switch iftpe := ftpe.Elt.(type) {
				case *ast.ArrayType:
					eleTaggers := itemsTaggers(items.Schema, level)
					sp.taggers = append(eleTaggers, sp.taggers...)
					ftpe = iftpe
				case *ast.Ident:
					if iftpe.Obj == nil {
						sp.taggers = append(itemsTaggers(items.Schema, level), sp.taggers...)
					}
					break
					//default:
					//return fmt.Errorf("unknown field type (%T) ele for %q", iftpe, nm)
				}
				items = items.Schema.Items
				level = level + 1
			}
		}
	} else {
		sp.taggers = []tagParser{
			newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
		}
	}
	return sp
}
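// For illustration only: a hypothetical swagger:model whose field doc
// comments carry the kind of validation annotations the taggers above parse.
// The exact accepted spellings are defined by the rx*Fmt patterns; the forms
// below are the commonly documented ones. Nested element validations for
// slices are handled by the itemsTaggers closure and are not shown here.
//
// swagger:model scoredItemExample
type scoredItemExample struct {
	// the display name for this item
	//
	// required: true
	// min length: 1
	// max length: 255
	Name string `json:"name"`

	// a score between 0 and 100
	//
	// minimum: 0
	// maximum: 100
	Score int32 `json:"score"`

	// tags attached to this item
	//
	// min items: 1
	// unique: true
	Tags []string `json:"tags"`
}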
func (scp *schemaParser) parseStructType(gofile *ast.File, bschema *spec.Schema, tpe *ast.StructType, seenPreviously map[string]struct{}) error {
	if tpe.Fields == nil {
		return nil
	}
	var schema *spec.Schema
	seenProperties := seenPreviously
	hasAllOf := false

	for _, fld := range tpe.Fields.List {
		if len(fld.Names) == 0 {
			// if this created an allOf property then we have to rejig the schema var
			// because all the fields collected that aren't from embedded structs should go in
			// their own proper schema
			// first process embedded structs in order of embedding
			if allOfMember(fld.Doc) {
				hasAllOf = true
				if schema == nil {
					schema = new(spec.Schema)
				}
				var newSch spec.Schema
				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil {
					return err
				}

				if fld.Doc != nil {
					for _, cmt := range fld.Doc.List {
						for _, ln := range strings.Split(cmt.Text, "\n") {
							matches := rxAllOf.FindStringSubmatch(ln)
							ml := len(matches)
							if ml > 1 {
								mv := matches[ml-1]
								if mv != "" {
									bschema.AddExtension("x-class", mv)
								}
							}
						}
					}
				}

				bschema.AllOf = append(bschema.AllOf, newSch)
				continue
			}
			if schema == nil {
				schema = bschema
			}

			// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
			// otherwise the fields will just be included as normal properties
			if err := scp.parseEmbeddedType(gofile, schema, fld.Type, seenProperties); err != nil {
				return err
			}
		}
	}
	if schema == nil {
		schema = bschema
	}

	// then add and possibly override values
	if schema.Properties == nil {
		schema.Properties = make(map[string]spec.Schema)
	}
	schema.Typed("object", "")
	for _, fld := range tpe.Fields.List {
		var tag string
		if fld.Tag != nil {
			val, err := strconv.Unquote(fld.Tag.Value)
			if err == nil {
				tag = reflect.StructTag(val).Get("json")
			}
		}
		if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() && (tag == "" || tag[0] != '-') {
			var nm, gnm string
			nm = fld.Names[0].Name
			gnm = nm
			if fld.Tag != nil && len(strings.TrimSpace(fld.Tag.Value)) > 0 /*&& fld.Tag.Value[0] != '-'*/ {
				tv, err := strconv.Unquote(fld.Tag.Value)
				if err != nil {
					return err
				}

				if strings.TrimSpace(tv) != "" {
					st := reflect.StructTag(tv)
					if st.Get("json") != "" {
						nm = strings.Split(st.Get("json"), ",")[0]
					}
				}
			}

			ps := schema.Properties[nm]
			if err := parseProperty(scp, gofile, fld.Type, schemaTypable{&ps, 0}); err != nil {
				return err
			}

			if err := scp.createParser(nm, schema, &ps, fld).Parse(fld.Doc); err != nil {
				return err
			}

			if nm != gnm {
				ps.AddExtension("x-go-name", gnm)
			}
			seenProperties[nm] = struct{}{}
			schema.Properties[nm] = ps
		}
	}
	if schema != nil && hasAllOf {
		bschema.AllOf = append(bschema.AllOf, *schema)
	}
	for k := range schema.Properties {
		if _, ok := seenProperties[k]; !ok {
			delete(schema.Properties, k)
		}
	}
	return nil
}
func (scp *schemaParser) parseInterfaceType(gofile *ast.File, bschema *spec.Schema, tpe *ast.InterfaceType, seenPreviously map[string]struct{}) error {
	if tpe.Methods == nil {
		return nil
	}

	// first check if this has embedded interfaces, if so make sure to refer to those by ref
	// when they are decorated with an allOf annotation
	// go over the method list again and this time collect the nullary methods and parse the comments
	// as if they are properties on a struct
	var schema *spec.Schema
	seenProperties := seenPreviously
	hasAllOf := false

	for _, fld := range tpe.Methods.List {
		if len(fld.Names) == 0 {
			// if this created an allOf property then we have to rejig the schema var
			// because all the fields collected that aren't from embedded structs should go in
			// their own proper schema
			// first process embedded structs in order of embedding
			if allOfMember(fld.Doc) {
				hasAllOf = true
				if schema == nil {
					schema = new(spec.Schema)
				}
				var newSch spec.Schema
				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil {
					return err
				}

				if fld.Doc != nil {
					for _, cmt := range fld.Doc.List {
						for _, ln := range strings.Split(cmt.Text, "\n") {
							matches := rxAllOf.FindStringSubmatch(ln)
							ml := len(matches)
							if ml > 1 {
								mv := matches[ml-1]
								if mv != "" {
									bschema.AddExtension("x-class", mv)
								}
							}
						}
					}
				}

				bschema.AllOf = append(bschema.AllOf, newSch)
				continue
			}

			var newSch spec.Schema
			// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
			// otherwise the fields will just be included as normal properties
			if err := scp.parseEmbeddedType(gofile, &newSch, fld.Type, seenProperties); err != nil {
				return err
			}
			bschema.AllOf = append(bschema.AllOf, newSch)
			hasAllOf = true
		}
	}
	if schema == nil {
		schema = bschema
	}

	// then add and possibly override values
	if schema.Properties == nil {
		schema.Properties = make(map[string]spec.Schema)
	}
	schema.Typed("object", "")
	for _, fld := range tpe.Methods.List {
		if mtpe, ok := fld.Type.(*ast.FuncType); ok && mtpe.Params.NumFields() == 0 && mtpe.Results.NumFields() == 1 {
			gnm := fld.Names[0].Name
			nm := gnm

			if fld.Doc != nil {
				for _, cmt := range fld.Doc.List {
					for _, ln := range strings.Split(cmt.Text, "\n") {
						matches := rxName.FindStringSubmatch(ln)
						ml := len(matches)
						if ml > 1 {
							nm = matches[ml-1]
						}
					}
				}
			}

			ps := schema.Properties[nm]
			if err := parseProperty(scp, gofile, mtpe.Results.List[0].Type, schemaTypable{&ps, 0}); err != nil {
				return err
			}

			if err := scp.createParser(nm, schema, &ps, fld).Parse(fld.Doc); err != nil {
				return err
			}

			if nm != gnm {
				ps.AddExtension("x-go-name", gnm)
			}
			seenProperties[nm] = struct{}{}
			schema.Properties[nm] = ps
		}
	}
	if schema != nil && hasAllOf {
		bschema.AllOf = append(bschema.AllOf, *schema)
	}
	for k := range schema.Properties {
		if _, ok := seenProperties[k]; !ok {
			delete(schema.Properties, k)
		}
	}
	return nil
}
func (scp *schemaParser) parseStructType(gofile *ast.File, bschema *spec.Schema, tpe *ast.StructType, seenPreviously map[string]struct{}) error {
	if tpe.Fields != nil {
		var schema *spec.Schema
		seenProperties := seenPreviously

		for _, fld := range tpe.Fields.List {
			if len(fld.Names) == 0 {
				// if this created an allOf property then we have to rejig the schema var
				// because all the fields collected that aren't from embedded structs should go in
				// their own proper schema
				// first process embedded structs in order of embedding
				if allOfMember(fld.Doc) {
					if schema == nil {
						schema = new(spec.Schema)
					}
					var newSch spec.Schema
					// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
					// otherwise the fields will just be included as normal properties
					if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil {
						return err
					}
					bschema.AllOf = append(bschema.AllOf, newSch)
					continue
				}
				if schema == nil {
					schema = bschema
				}

				// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
				// otherwise the fields will just be included as normal properties
				if err := scp.parseEmbeddedStruct(gofile, schema, fld.Type, seenProperties); err != nil {
					return err
				}
			}
		}
		if schema != nil && len(bschema.AllOf) > 0 {
			bschema.AllOf = append(bschema.AllOf, *schema)
		}
		if schema == nil {
			schema = bschema
		}

		// then add and possibly override values
		if schema.Properties == nil {
			schema.Properties = make(map[string]spec.Schema)
		}
		schema.Typed("object", "")
		for _, fld := range tpe.Fields.List {
			var tag string
			if fld.Tag != nil {
				val, err := strconv.Unquote(fld.Tag.Value)
				if err == nil {
					tag = reflect.StructTag(val).Get("json")
				}
			}
			if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() && (tag == "" || tag[0] != '-') {
				var nm, gnm string
				nm = fld.Names[0].Name
				gnm = nm
				if fld.Tag != nil && len(strings.TrimSpace(fld.Tag.Value)) > 0 /*&& fld.Tag.Value[0] != '-'*/ {
					tv, err := strconv.Unquote(fld.Tag.Value)
					if err != nil {
						return err
					}

					if strings.TrimSpace(tv) != "" {
						st := reflect.StructTag(tv)
						if st.Get("json") != "" {
							nm = strings.Split(st.Get("json"), ",")[0]
						}
					}
				}

				ps := schema.Properties[nm]
				if err := parseProperty(scp, gofile, fld.Type, schemaTypable{&ps, 0}); err != nil {
					return err
				}

				sp := new(sectionedParser)
				sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
				if ps.Ref.GetURL() == nil {
					sp.taggers = []tagParser{
						newSingleLineTagParser("maximum", &setMaximum{schemaValidations{&ps}, rxf(rxMaximumFmt, "")}),
						newSingleLineTagParser("minimum", &setMinimum{schemaValidations{&ps}, rxf(rxMinimumFmt, "")}),
						newSingleLineTagParser("multipleOf", &setMultipleOf{schemaValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
						newSingleLineTagParser("minLength", &setMinLength{schemaValidations{&ps}, rxf(rxMinLengthFmt, "")}),
						newSingleLineTagParser("maxLength", &setMaxLength{schemaValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
						newSingleLineTagParser("pattern", &setPattern{schemaValidations{&ps}, rxf(rxPatternFmt, "")}),
						newSingleLineTagParser("minItems", &setMinItems{schemaValidations{&ps}, rxf(rxMinItemsFmt, "")}),
						newSingleLineTagParser("maxItems", &setMaxItems{schemaValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
						newSingleLineTagParser("unique", &setUnique{schemaValidations{&ps}, rxf(rxUniqueFmt, "")}),
						newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
						newSingleLineTagParser("readOnly", &setReadOnlySchema{&ps}),
					}

					itemsTaggers := func(items *spec.Schema, level int) []tagParser {
						// the expression is 1-index based not 0-index
						itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
						return []tagParser{
							newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{schemaValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{schemaValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{schemaValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{schemaValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{schemaValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{schemaValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{schemaValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{schemaValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
							newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{schemaValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
						}
					}

					// check if this is a primitive, if so parse the validations from the
					// doc comments of the slice declaration.
					if ftped, ok := fld.Type.(*ast.ArrayType); ok {
						ftpe := ftped
						items, level := ps.Items, 0
						for items != nil && items.Schema != nil {
							switch iftpe := ftpe.Elt.(type) {
							case *ast.ArrayType:
								eleTaggers := itemsTaggers(items.Schema, level)
								sp.taggers = append(eleTaggers, sp.taggers...)
								ftpe = iftpe
							case *ast.Ident:
								if iftpe.Obj == nil {
									sp.taggers = append(itemsTaggers(items.Schema, level), sp.taggers...)
								}
								break
								//default:
								//return fmt.Errorf("unknown field type (%T) ele for %q", iftpe, nm)
							}
							items = items.Schema.Items
							level = level + 1
						}
					}
				} else {
					sp.taggers = []tagParser{
						newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
					}
				}

				if err := sp.Parse(fld.Doc); err != nil {
					return err
				}

				if nm != gnm {
					ps.AddExtension("x-go-name", gnm)
				}
				seenProperties[nm] = struct{}{}
				schema.Properties[nm] = ps
			}
		}

		for k := range schema.Properties {
			if _, ok := seenProperties[k]; !ok {
				delete(schema.Properties, k)
			}
		}
	}
	return nil
}