forked from kevadesu/forgejo

Use vendored go-swagger (#8087)

* Use vendored go-swagger
* vendor go-swagger
* revert unwanted change
* remove un-needed GO111MODULE
* Update Makefile

Co-Authored-By: techknowlogick <matti@mdranta.net>

parent 4cb1bdddc8
commit 9fe4437bda

686 changed files with 143379 additions and 17 deletions
165  vendor/github.com/go-swagger/go-swagger/scan/classifier.go  (generated, vendored, new file)
@@ -0,0 +1,165 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"log"
|
||||
"regexp"
|
||||
|
||||
"golang.org/x/tools/go/loader"
|
||||
)
|
||||
|
||||
type packageFilter struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
func (pf *packageFilter) Matches(path string) bool {
|
||||
matched, err := regexp.MatchString(pf.Name, path)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return matched
|
||||
}
|
||||
|
||||
type packageFilters []packageFilter
|
||||
|
||||
func (pf packageFilters) HasFilters() bool {
|
||||
return len(pf) > 0
|
||||
}
|
||||
|
||||
func (pf packageFilters) Matches(path string) bool {
|
||||
for _, mod := range pf {
|
||||
if mod.Matches(path) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type classifiedProgram struct {
|
||||
Meta []*ast.File
|
||||
Models []*ast.File
|
||||
Routes []*ast.File
|
||||
Operations []*ast.File
|
||||
Parameters []*ast.File
|
||||
Responses []*ast.File
|
||||
}
|
||||
|
||||
// programClassifier classifies the files of a program into buckets
// for processing by a swagger spec generator. Files are bucketed into
// Meta, Models, Routes, Operations, Parameters and Responses.
//
// Each of these buckets is then processed with an appropriate parsing strategy.
//
// When Include or Exclude filters are provided they are used to limit the
// candidates prior to parsing.
// The include filters take precedence over the excludes, so when something appears
// in both filters it will be included.
type programClassifier struct {
|
||||
Includes packageFilters
|
||||
Excludes packageFilters
|
||||
}
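// Illustrative sketch only (not part of the vendored file): one way a caller might
// drive the classifier, assuming a program has already been loaded with
// golang.org/x/tools/go/loader. The import path and include pattern are invented.
//
//	var conf loader.Config
//	conf.Import("github.com/example/myapi")
//	prog, err := conf.Load()
//	if err != nil {
//		log.Fatal(err)
//	}
//	pc := &programClassifier{
//		Includes: packageFilters{{Name: "github.com/example/myapi"}},
//	}
//	classified, err := pc.Classify(prog)
//	_ = classified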
|
||||
|
||||
func (pc *programClassifier) Classify(prog *loader.Program) (*classifiedProgram, error) {
|
||||
var cp classifiedProgram
|
||||
for pkg, pkgInfo := range prog.AllPackages {
|
||||
if Debug {
|
||||
log.Printf("analyzing: %s\n", pkg.Path())
|
||||
}
|
||||
if pc.Includes.HasFilters() {
|
||||
if !pc.Includes.Matches(pkg.Path()) {
|
||||
continue
|
||||
}
|
||||
} else if pc.Excludes.HasFilters() {
|
||||
if pc.Excludes.Matches(pkg.Path()) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
for _, file := range pkgInfo.Files {
|
||||
var ro, op, mt, pm, rs, mm bool // only add a particular file once
|
||||
for _, comments := range file.Comments {
|
||||
var seenStruct string
|
||||
for _, cline := range comments.List {
|
||||
if cline != nil {
|
||||
matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text)
|
||||
if len(matches) > 1 {
|
||||
switch matches[1] {
|
||||
case "route":
|
||||
if !ro {
|
||||
cp.Routes = append(cp.Routes, file)
|
||||
ro = true
|
||||
}
|
||||
case "operation":
|
||||
if !op {
|
||||
cp.Operations = append(cp.Operations, file)
|
||||
op = true
|
||||
}
|
||||
case "model":
|
||||
if !mm {
|
||||
cp.Models = append(cp.Models, file)
|
||||
mm = true
|
||||
}
|
||||
if seenStruct == "" || seenStruct == matches[1] {
|
||||
seenStruct = matches[1]
|
||||
} else {
|
||||
return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
|
||||
}
|
||||
case "meta":
|
||||
if !mt {
|
||||
cp.Meta = append(cp.Meta, file)
|
||||
mt = true
|
||||
}
|
||||
case "parameters":
|
||||
if !pm {
|
||||
cp.Parameters = append(cp.Parameters, file)
|
||||
pm = true
|
||||
}
|
||||
if seenStruct == "" || seenStruct == matches[1] {
|
||||
seenStruct = matches[1]
|
||||
} else {
|
||||
return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
|
||||
}
|
||||
case "response":
|
||||
if !rs {
|
||||
cp.Responses = append(cp.Responses, file)
|
||||
rs = true
|
||||
}
|
||||
if seenStruct == "" || seenStruct == matches[1] {
|
||||
seenStruct = matches[1]
|
||||
} else {
|
||||
return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q", seenStruct, matches[1])
|
||||
}
|
||||
case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type":
|
||||
// TODO: perhaps collect these and pass along to avoid lookups later on
|
||||
case "allOf":
|
||||
case "ignore":
|
||||
default:
|
||||
return nil, fmt.Errorf("classifier: unknown swagger annotation %q", matches[1])
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &cp, nil
|
||||
}
|
85  vendor/github.com/go-swagger/go-swagger/scan/doc.go  (generated, vendored, new file)
@@ -0,0 +1,85 @@
// +build !go1.11

// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/*Package scan provides a scanner for go files that produces a swagger spec document.

You give it a main file and it will parse all the files that are required by that main
package to produce a swagger specification.

To use it, add a go:generate comment to your main file, for example:

	//go:generate swagger generate spec

The following annotations exist:

swagger:meta

The swagger:meta annotation flags a file as source for metadata about the API.
This is typically a doc.go file with your package documentation.

You can specify a Consumes and Produces key, which has a new content type on each line.
Schemes is a required tag and allows for a comma separated string composed of:
http, https, ws or wss.

Host and BasePath can be specified, but those values will be defaults;
they should get substituted when serving the swagger spec.

Default parameters and responses are not supported at this stage; for those you can edit the template json.

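As an illustration (not part of the original package documentation), a doc.go that
carries such a block might look roughly like the following; the title, host and
content types are invented for the example:

	// Package classification Petstore API.
	//
	//     Schemes: http, https
	//     Host: localhost:8080
	//     BasePath: /v1
	//     Version: 1.0.0
	//
	//     Consumes:
	//     - application/json
	//
	//     Produces:
	//     - application/json
	//
	// swagger:meta
	package main
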
swagger:strfmt [name]

A swagger:strfmt annotation names a type as a string formatter. The name is mandatory and
will be used as the format name for this particular string format.
String formats should only be used for very well known formats.

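For illustration only, naming a custom string format could look roughly like this
(the type and format name are invented, and real string formats are expected to
carry their own validation and marshalling logic):

	// SSN is a social security number.
	//
	// swagger:strfmt ssn
	type SSN string
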
swagger:model [?model name]

A swagger:model annotation optionally gets a model name as extra data on the line.
When this appears anywhere in a comment for a struct, that struct becomes a schema
in the definitions object of swagger.

The struct gets analyzed and all the collected models are added to the tree.
The refs are tracked separately so that they can be renamed later on.

When this annotation is found on an interface instead of a struct, the properties are provided
through exported nullary methods.

A property of an interface model can have a Discriminator: true annotation to mark that field as
the field that will contain the discriminator value.

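A hypothetical model declaration (type and fields invented for the example):

	// User represents an account holder.
	//
	// swagger:model user
	type User struct {
		// the id for this user
		ID int64 `json:"id"`

		// the name for this user
		//
		// required: true
		Name string `json:"name"`
	}
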
swagger:route [method] [path pattern] [operation id] [?tag1 tag2 tag3]

A swagger:route annotation links a path to a method.
This operation gets a unique id, which is used in various places as a method name,
for example in method names for generated clients.

Because there are many routers available, this tool does not try to parse the paths
you provided to your routing library of choice, so you have to specify your path pattern
yourself in valid swagger syntax.

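A hypothetical route annotation (method, path, tag and response names are invented;
the description and response mappings on the following lines are picked up by the
comment scanner):

	// swagger:route GET /users/{id} users getUserByID
	//
	// Gets a user by its id.
	//
	//     Responses:
	//       200: userResponse
	//       404: notFoundResponse
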
swagger:params [operationid1 operationid2]

Links a struct to one or more operations. The params in the resulting swagger spec can be composed of several structs.
There are no guarantees given on how property name overlaps are resolved when several structs apply to the same operation.
This tag works very similarly to the swagger:model tag, except that it produces valid parameter objects instead of schema objects.

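Note that in source the annotation is spelled swagger:parameters, which is the keyword
the classifier matches. A hypothetical example (operation id and field are invented):

	// swagger:parameters getUserByID
	type userIDParam struct {
		// The id of the user to fetch.
		//
		// in: path
		// required: true
		ID int64 `json:"id"`
	}
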
swagger:response [?response name]

Reads a struct decorated with swagger:response and uses that information to fill up the headers and the schema for a response.
A swagger:route can specify a response name for a status code and then the matching response will be used for that operation in the swagger definition.

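A hypothetical response declaration (names invented); the field marked in: body carries
the schema, while the other exported field becomes a response header:

	// swagger:response userResponse
	type userResponse struct {
		// The id of the request, echoed back as a header.
		XRequestID string `json:"X-Request-Id"`

		// in: body
		Body *User `json:"body"`
	}
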
*/
package scan
245  vendor/github.com/go-swagger/go-swagger/scan/meta.go  (generated, vendored, new file)
@@ -0,0 +1,245 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/mail"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
func metaTOSSetter(meta *spec.Info) func([]string) {
|
||||
return func(lines []string) {
|
||||
meta.TermsOfService = joinDropLast(lines)
|
||||
}
|
||||
}
|
||||
|
||||
func metaConsumesSetter(meta *spec.Swagger) func([]string) {
|
||||
return func(consumes []string) { meta.Consumes = consumes }
|
||||
}
|
||||
|
||||
func metaProducesSetter(meta *spec.Swagger) func([]string) {
|
||||
return func(produces []string) { meta.Produces = produces }
|
||||
}
|
||||
|
||||
func metaSchemeSetter(meta *spec.Swagger) func([]string) {
|
||||
return func(schemes []string) { meta.Schemes = schemes }
|
||||
}
|
||||
|
||||
func metaSecuritySetter(meta *spec.Swagger) func([]map[string][]string) {
|
||||
return func(secDefs []map[string][]string) { meta.Security = secDefs }
|
||||
}
|
||||
|
||||
func metaSecurityDefinitionsSetter(meta *spec.Swagger) func(json.RawMessage) error {
|
||||
return func(jsonValue json.RawMessage) error {
|
||||
var jsonData spec.SecurityDefinitions
|
||||
err := json.Unmarshal(jsonValue, &jsonData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
meta.SecurityDefinitions = jsonData
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
|
||||
return func(jsonValue json.RawMessage) error {
|
||||
var jsonData spec.Extensions
|
||||
err := json.Unmarshal(jsonValue, &jsonData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for k := range jsonData {
|
||||
if !rxAllowedExtensions.MatchString(k) {
|
||||
return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
|
||||
}
|
||||
}
|
||||
meta.Extensions = jsonData
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
|
||||
return func(jsonValue json.RawMessage) error {
|
||||
var jsonData spec.Extensions
|
||||
err := json.Unmarshal(jsonValue, &jsonData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for k := range jsonData {
|
||||
if !rxAllowedExtensions.MatchString(k) {
|
||||
return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
|
||||
}
|
||||
}
|
||||
meta.Info.Extensions = jsonData
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func newMetaParser(swspec *spec.Swagger) *sectionedParser {
|
||||
sp := new(sectionedParser)
|
||||
if swspec.Info == nil {
|
||||
swspec.Info = new(spec.Info)
|
||||
}
|
||||
info := swspec.Info
|
||||
sp.setTitle = func(lines []string) {
|
||||
tosave := joinDropLast(lines)
|
||||
if len(tosave) > 0 {
|
||||
tosave = rxStripTitleComments.ReplaceAllString(tosave, "")
|
||||
}
|
||||
info.Title = tosave
|
||||
}
|
||||
sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) }
|
||||
sp.taggers = []tagParser{
|
||||
newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false),
|
||||
newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false),
|
||||
newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false),
|
||||
newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))),
|
||||
newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false),
|
||||
newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true),
|
||||
newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}),
|
||||
newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}),
|
||||
newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}),
|
||||
newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}),
|
||||
newSingleLineTagParser("License", &setMetaSingle{swspec, rxLicense, setInfoLicense}),
|
||||
newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true),
|
||||
newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true),
|
||||
}
|
||||
return sp
|
||||
}
|
||||
|
||||
type setMetaSingle struct {
|
||||
spec *spec.Swagger
|
||||
rx *regexp.Regexp
|
||||
set func(spec *spec.Swagger, lines []string) error
|
||||
}
|
||||
|
||||
func (s *setMetaSingle) Matches(line string) bool {
|
||||
return s.rx.MatchString(line)
|
||||
}
|
||||
|
||||
func (s *setMetaSingle) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
matches := s.rx.FindStringSubmatch(lines[0])
|
||||
if len(matches) > 1 && len(matches[1]) > 0 {
|
||||
return s.set(s.spec, []string{matches[1]})
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func setSwaggerHost(swspec *spec.Swagger, lines []string) error {
|
||||
lns := lines
|
||||
if len(lns) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
lns = []string{"localhost"}
|
||||
}
|
||||
swspec.Host = lns[0]
|
||||
return nil
|
||||
}
|
||||
|
||||
func setSwaggerBasePath(swspec *spec.Swagger, lines []string) error {
|
||||
var ln string
|
||||
if len(lines) > 0 {
|
||||
ln = lines[0]
|
||||
}
|
||||
swspec.BasePath = ln
|
||||
return nil
|
||||
}
|
||||
|
||||
func setInfoVersion(swspec *spec.Swagger, lines []string) error {
|
||||
if len(lines) == 0 {
|
||||
return nil
|
||||
}
|
||||
info := safeInfo(swspec)
|
||||
info.Version = strings.TrimSpace(lines[0])
|
||||
return nil
|
||||
}
|
||||
|
||||
func setInfoContact(swspec *spec.Swagger, lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
contact, err := parseContactInfo(lines[0])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
info := safeInfo(swspec)
|
||||
info.Contact = contact
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseContactInfo(line string) (*spec.ContactInfo, error) {
|
||||
nameEmail, url := splitURL(line)
|
||||
var name, email string
|
||||
if len(nameEmail) > 0 {
|
||||
addr, err := mail.ParseAddress(nameEmail)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
name, email = addr.Name, addr.Address
|
||||
}
|
||||
return &spec.ContactInfo{
|
||||
URL: url,
|
||||
Name: name,
|
||||
Email: email,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func setInfoLicense(swspec *spec.Swagger, lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
info := safeInfo(swspec)
|
||||
line := lines[0]
|
||||
name, url := splitURL(line)
|
||||
info.License = &spec.License{
|
||||
Name: name,
|
||||
URL: url,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func safeInfo(swspec *spec.Swagger) *spec.Info {
|
||||
if swspec.Info == nil {
|
||||
swspec.Info = new(spec.Info)
|
||||
}
|
||||
return swspec.Info
|
||||
}
|
||||
|
||||
// httpFTPScheme matches http://, https://, ftp://, ftps://, ws:// and wss://
|
||||
var httpFTPScheme = regexp.MustCompile("(?:(?:ht|f)tp|ws)s?://")
|
||||
|
||||
func splitURL(line string) (notURL, url string) {
|
||||
str := strings.TrimSpace(line)
|
||||
parts := httpFTPScheme.FindStringIndex(str)
|
||||
if len(parts) == 0 {
|
||||
if len(str) > 0 {
|
||||
notURL = str
|
||||
}
|
||||
return
|
||||
}
|
||||
if len(parts) > 0 {
|
||||
notURL = strings.TrimSpace(str[:parts[0]])
|
||||
url = strings.TrimSpace(str[parts[0]:])
|
||||
}
|
||||
return
|
||||
}
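// Illustrative only (not part of the vendored file): for a meta line such as
//
//	John Doe <john.doe@example.com> https://john.doe.example.com
//
// splitURL returns notURL = "John Doe <john.doe@example.com>" and
// url = "https://john.doe.example.com"; parseContactInfo then extracts the name
// and e-mail address from the first part with net/mail. The values are invented.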
|
84  vendor/github.com/go-swagger/go-swagger/scan/operations.go  (generated, vendored, new file)
@@ -0,0 +1,84 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
|
||||
"golang.org/x/tools/go/loader"
|
||||
)
|
||||
|
||||
func newOperationsParser(prog *loader.Program) *operationsParser {
|
||||
return &operationsParser{
|
||||
program: prog,
|
||||
}
|
||||
}
|
||||
|
||||
type operationsParser struct {
|
||||
program *loader.Program
|
||||
definitions map[string]spec.Schema
|
||||
operations map[string]*spec.Operation
|
||||
responses map[string]spec.Response
|
||||
}
|
||||
|
||||
func (op *operationsParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error {
|
||||
tgt := target.(*spec.Paths)
|
||||
for _, comsec := range gofile.Comments {
|
||||
content := parsePathAnnotation(rxOperation, comsec.List)
|
||||
|
||||
if content.Method == "" {
|
||||
continue // it's not, next!
|
||||
}
|
||||
|
||||
if !shouldAcceptTag(content.Tags, includeTags, excludeTags) {
|
||||
if Debug {
|
||||
fmt.Printf("operation %s %s is ignored due to tag rules\n", content.Method, content.Path)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
pthObj := tgt.Paths[content.Path]
|
||||
|
||||
op := setPathOperation(
|
||||
content.Method, content.ID,
|
||||
&pthObj, op.operations[content.ID])
|
||||
|
||||
op.Tags = content.Tags
|
||||
|
||||
sp := new(yamlSpecScanner)
|
||||
sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
|
||||
sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
|
||||
|
||||
if err := sp.Parse(content.Remaining); err != nil {
|
||||
return fmt.Errorf("operation (%s): %v", op.ID, err)
|
||||
}
|
||||
if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil {
|
||||
return fmt.Errorf("operation (%s): %v", op.ID, err)
|
||||
}
|
||||
|
||||
if tgt.Paths == nil {
|
||||
tgt.Paths = make(map[string]spec.PathItem)
|
||||
}
|
||||
|
||||
tgt.Paths[content.Path] = pthObj
|
||||
}
|
||||
|
||||
return nil
|
||||
}
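// Illustrative only (not part of the vendored file): the kind of comment this parser
// consumes is a swagger:operation annotation followed by a summary and a YAML block,
// roughly like the following; the path, tag, id and response description are invented.
//
//	// swagger:operation GET /users/{id} users getUserByID
//	//
//	// Gets a user by its id.
//	//
//	// ---
//	// responses:
//	//   "200":
//	//     description: the requested user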
|
506  vendor/github.com/go-swagger/go-swagger/scan/parameters.go  (generated, vendored, new file)
@@ -0,0 +1,506 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
"golang.org/x/tools/go/loader"
|
||||
)
|
||||
|
||||
type operationValidationBuilder interface {
|
||||
validationBuilder
|
||||
SetCollectionFormat(string)
|
||||
}
|
||||
|
||||
type paramTypable struct {
|
||||
param *spec.Parameter
|
||||
}
|
||||
|
||||
func (pt paramTypable) Level() int { return 0 }
|
||||
|
||||
func (pt paramTypable) Typed(tpe, format string) {
|
||||
pt.param.Typed(tpe, format)
|
||||
}
|
||||
|
||||
func (pt paramTypable) SetRef(ref spec.Ref) {
|
||||
pt.param.Ref = ref
|
||||
}
|
||||
|
||||
func (pt paramTypable) Items() swaggerTypable {
|
||||
bdt, schema := bodyTypable(pt.param.In, pt.param.Schema)
|
||||
if bdt != nil {
|
||||
pt.param.Schema = schema
|
||||
return bdt
|
||||
}
|
||||
|
||||
if pt.param.Items == nil {
|
||||
pt.param.Items = new(spec.Items)
|
||||
}
|
||||
pt.param.Type = "array"
|
||||
return itemsTypable{pt.param.Items, 1}
|
||||
}
|
||||
|
||||
func (pt paramTypable) Schema() *spec.Schema {
|
||||
if pt.param.In != "body" {
|
||||
return nil
|
||||
}
|
||||
if pt.param.Schema == nil {
|
||||
pt.param.Schema = new(spec.Schema)
|
||||
}
|
||||
return pt.param.Schema
|
||||
}
|
||||
|
||||
type itemsTypable struct {
|
||||
items *spec.Items
|
||||
level int
|
||||
}
|
||||
|
||||
func (pt itemsTypable) Level() int { return pt.level }
|
||||
|
||||
func (pt itemsTypable) Typed(tpe, format string) {
|
||||
pt.items.Typed(tpe, format)
|
||||
}
|
||||
|
||||
func (pt itemsTypable) SetRef(ref spec.Ref) {
|
||||
pt.items.Ref = ref
|
||||
}
|
||||
|
||||
func (pt itemsTypable) Schema() *spec.Schema {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pt itemsTypable) Items() swaggerTypable {
|
||||
if pt.items.Items == nil {
|
||||
pt.items.Items = new(spec.Items)
|
||||
}
|
||||
pt.items.Type = "array"
|
||||
return itemsTypable{pt.items.Items, pt.level + 1}
|
||||
}
|
||||
|
||||
type paramValidations struct {
|
||||
current *spec.Parameter
|
||||
}
|
||||
|
||||
func (sv paramValidations) SetMaximum(val float64, exclusive bool) {
|
||||
sv.current.Maximum = &val
|
||||
sv.current.ExclusiveMaximum = exclusive
|
||||
}
|
||||
func (sv paramValidations) SetMinimum(val float64, exclusive bool) {
|
||||
sv.current.Minimum = &val
|
||||
sv.current.ExclusiveMinimum = exclusive
|
||||
}
|
||||
func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
|
||||
func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
|
||||
func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
|
||||
func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
|
||||
func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
|
||||
func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val }
|
||||
func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
|
||||
func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
|
||||
func (sv paramValidations) SetEnum(val string) {
|
||||
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
|
||||
}
|
||||
func (sv paramValidations) SetDefault(val interface{}) { sv.current.Default = val }
|
||||
func (sv paramValidations) SetExample(val interface{}) { sv.current.Example = val }
|
||||
|
||||
type itemsValidations struct {
|
||||
current *spec.Items
|
||||
}
|
||||
|
||||
func (sv itemsValidations) SetMaximum(val float64, exclusive bool) {
|
||||
sv.current.Maximum = &val
|
||||
sv.current.ExclusiveMaximum = exclusive
|
||||
}
|
||||
func (sv itemsValidations) SetMinimum(val float64, exclusive bool) {
|
||||
sv.current.Minimum = &val
|
||||
sv.current.ExclusiveMinimum = exclusive
|
||||
}
|
||||
func (sv itemsValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
|
||||
func (sv itemsValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
|
||||
func (sv itemsValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
|
||||
func (sv itemsValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
|
||||
func (sv itemsValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
|
||||
func (sv itemsValidations) SetPattern(val string) { sv.current.Pattern = val }
|
||||
func (sv itemsValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
|
||||
func (sv itemsValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
|
||||
func (sv itemsValidations) SetEnum(val string) {
|
||||
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
|
||||
}
|
||||
func (sv itemsValidations) SetDefault(val interface{}) { sv.current.Default = val }
|
||||
func (sv itemsValidations) SetExample(val interface{}) { sv.current.Example = val }
|
||||
|
||||
type paramDecl struct {
|
||||
File *ast.File
|
||||
Decl *ast.GenDecl
|
||||
TypeSpec *ast.TypeSpec
|
||||
OperationIDs []string
|
||||
}
|
||||
|
||||
func (sd *paramDecl) inferOperationIDs() (opids []string) {
|
||||
if len(sd.OperationIDs) > 0 {
|
||||
opids = sd.OperationIDs
|
||||
return
|
||||
}
|
||||
|
||||
if sd.Decl.Doc != nil {
|
||||
for _, cmt := range sd.Decl.Doc.List {
|
||||
for _, ln := range strings.Split(cmt.Text, "\n") {
|
||||
matches := rxParametersOverride.FindStringSubmatch(ln)
|
||||
if len(matches) > 1 && len(matches[1]) > 0 {
|
||||
for _, pt := range strings.Split(matches[1], " ") {
|
||||
tr := strings.TrimSpace(pt)
|
||||
if len(tr) > 0 {
|
||||
opids = append(opids, tr)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
sd.OperationIDs = append(sd.OperationIDs, opids...)
|
||||
return
|
||||
}
|
||||
|
||||
func newParameterParser(prog *loader.Program) *paramStructParser {
|
||||
scp := new(paramStructParser)
|
||||
scp.program = prog
|
||||
scp.scp = newSchemaParser(prog)
|
||||
return scp
|
||||
}
|
||||
|
||||
type paramStructParser struct {
|
||||
program *loader.Program
|
||||
postDecls []schemaDecl
|
||||
scp *schemaParser
|
||||
}
|
||||
|
||||
// Parse will traverse a file and look for parameters.
|
||||
func (pp *paramStructParser) Parse(gofile *ast.File, target interface{}) error {
|
||||
tgt := target.(map[string]*spec.Operation)
|
||||
for _, decl := range gofile.Decls {
|
||||
switch x1 := decl.(type) {
|
||||
// Check for parameters at the package level.
|
||||
case *ast.GenDecl:
|
||||
for _, spc := range x1.Specs {
|
||||
switch x2 := spc.(type) {
|
||||
case *ast.TypeSpec:
|
||||
sd := paramDecl{gofile, x1, x2, nil}
|
||||
sd.inferOperationIDs()
|
||||
if err := pp.parseDecl(tgt, sd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
// Check for parameters inside functions.
|
||||
case *ast.FuncDecl:
|
||||
for _, b := range x1.Body.List {
|
||||
switch x2 := b.(type) {
|
||||
case *ast.DeclStmt:
|
||||
switch x3 := x2.Decl.(type) {
|
||||
case *ast.GenDecl:
|
||||
for _, spc := range x3.Specs {
|
||||
switch x4 := spc.(type) {
|
||||
case *ast.TypeSpec:
|
||||
sd := paramDecl{gofile, x3, x4, nil}
|
||||
sd.inferOperationIDs()
|
||||
if err := pp.parseDecl(tgt, sd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (pp *paramStructParser) parseDecl(operations map[string]*spec.Operation, decl paramDecl) error {
|
||||
// check if there is a swagger:parameters tag that is followed by one or more words;
// these words are the ids of the operations this parameter struct applies to.
// once the type name is found, it is converted to a schema by looking it up in the
// parameters dictionary that got passed into this parse method.
|
||||
for _, opid := range decl.inferOperationIDs() {
|
||||
operation, ok := operations[opid]
|
||||
if !ok {
|
||||
operation = new(spec.Operation)
|
||||
operations[opid] = operation
|
||||
operation.ID = opid
|
||||
}
|
||||
|
||||
// analyze struct body for fields etc
|
||||
// each exported struct field:
|
||||
// * gets a type mapped to a go primitive
|
||||
// * perhaps gets a format
|
||||
// * has to document the validations that apply for the type and the field
|
||||
// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
|
||||
// * comments that aren't tags are used as the description
|
||||
if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok {
|
||||
if err := pp.parseStructType(decl.File, operation, tpe, make(map[string]spec.Parameter)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
//operations[opid] = operation
|
||||
}
|
||||
return nil
|
||||
}
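// Illustrative only (not part of the vendored file): a parameters struct whose doc
// comments exercise the field-level taggers wired up below (in, required, maximum,
// pattern, ...). All names and limits are invented.
//
//	// swagger:parameters listWidgets
//	type listWidgetsParams struct {
//		// The maximum number of results to return.
//		//
//		// in: query
//		// maximum: 100
//		Limit int64 `json:"limit"`
//
//		// Only return widgets whose name matches this pattern.
//		//
//		// in: query
//		// pattern: ^[a-z]+$
//		Name string `json:"name"`
//	}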
|
||||
|
||||
func (pp *paramStructParser) parseEmbeddedStruct(gofile *ast.File, operation *spec.Operation, expr ast.Expr, seenPreviously map[string]spec.Parameter) error {
|
||||
switch tpe := expr.(type) {
|
||||
case *ast.Ident:
|
||||
// do lookup of type
|
||||
// take primitives into account, they should result in an error for swagger
|
||||
pkg, err := pp.scp.packageForFile(gofile, tpe)
|
||||
if err != nil {
|
||||
return fmt.Errorf("embedded struct: %v", err)
|
||||
}
|
||||
file, _, ts, err := findSourceFile(pkg, tpe.Name)
|
||||
if err != nil {
|
||||
return fmt.Errorf("embedded struct: %v", err)
|
||||
}
|
||||
if st, ok := ts.Type.(*ast.StructType); ok {
|
||||
return pp.parseStructType(file, operation, st, seenPreviously)
|
||||
}
|
||||
case *ast.SelectorExpr:
|
||||
// look up package, file and then type
|
||||
pkg, err := pp.scp.packageForSelector(gofile, tpe.X)
|
||||
if err != nil {
|
||||
return fmt.Errorf("embedded struct: %v", err)
|
||||
}
|
||||
file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
|
||||
if err != nil {
|
||||
return fmt.Errorf("embedded struct: %v", err)
|
||||
}
|
||||
if st, ok := ts.Type.(*ast.StructType); ok {
|
||||
return pp.parseStructType(file, operation, st, seenPreviously)
|
||||
}
|
||||
case *ast.StarExpr:
|
||||
return pp.parseEmbeddedStruct(gofile, operation, tpe.X, seenPreviously)
|
||||
}
|
||||
fmt.Printf("3%#v\n", expr)
|
||||
return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
|
||||
}
|
||||
|
||||
func (pp *paramStructParser) parseStructType(gofile *ast.File, operation *spec.Operation, tpe *ast.StructType, seenPreviously map[string]spec.Parameter) error {
|
||||
if tpe.Fields != nil {
|
||||
pt := seenPreviously
|
||||
|
||||
for _, fld := range tpe.Fields.List {
|
||||
if len(fld.Names) == 0 {
|
||||
// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
|
||||
// otherwise the fields will just be included as normal properties
|
||||
if err := pp.parseEmbeddedStruct(gofile, operation, fld.Type, pt); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// a slice used to keep track of the sequence of the map keys, as maps do not keep to any specific sequence (since Go-1.4)
|
||||
sequence := []string{}
|
||||
|
||||
for _, fld := range tpe.Fields.List {
|
||||
if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
|
||||
gnm := fld.Names[0].Name
|
||||
nm, ignore, _, err := parseJSONTag(fld)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ignore {
|
||||
continue
|
||||
}
|
||||
|
||||
in := "query"
|
||||
// scan for param location first, this changes some behavior down the line
|
||||
if fld.Doc != nil {
|
||||
for _, cmt := range fld.Doc.List {
|
||||
for _, line := range strings.Split(cmt.Text, "\n") {
|
||||
matches := rxIn.FindStringSubmatch(line)
|
||||
if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
|
||||
in = strings.TrimSpace(matches[1])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ps := pt[nm]
|
||||
ps.In = in
|
||||
var pty swaggerTypable = paramTypable{&ps}
|
||||
if in == "body" {
|
||||
pty = schemaTypable{pty.Schema(), 0}
|
||||
}
|
||||
if in == "formData" && fld.Doc != nil && fileParam(fld.Doc) {
|
||||
pty.Typed("file", "")
|
||||
} else {
|
||||
if err := pp.scp.parseNamedType(gofile, fld.Type, pty); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if strfmtName, ok := strfmtName(fld.Doc); ok {
|
||||
ps.Typed("string", strfmtName)
|
||||
ps.Ref = spec.Ref{}
|
||||
}
|
||||
|
||||
sp := new(sectionedParser)
|
||||
sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
|
||||
if ps.Ref.String() == "" {
|
||||
sp.taggers = []tagParser{
|
||||
newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
|
||||
newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}),
|
||||
newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}),
|
||||
newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
|
||||
newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}),
|
||||
newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
|
||||
newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}),
|
||||
newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
|
||||
newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}),
|
||||
newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
|
||||
newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}),
|
||||
newSingleLineTagParser("enum", &setEnum{paramValidations{&ps}, rxf(rxEnumFmt, "")}),
|
||||
newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxDefaultFmt, "")}),
|
||||
newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxExampleFmt, "")}),
|
||||
newSingleLineTagParser("required", &setRequiredParam{&ps}),
|
||||
}
|
||||
|
||||
itemsTaggers := func(items *spec.Items, level int) []tagParser {
|
||||
// the expression is 1-index based not 0-index
|
||||
itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
|
||||
|
||||
return []tagParser{
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
|
||||
}
|
||||
}
|
||||
|
||||
var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
|
||||
parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
|
||||
if items == nil {
|
||||
return []tagParser{}, nil
|
||||
}
|
||||
switch iftpe := expr.(type) {
|
||||
case *ast.ArrayType:
|
||||
eleTaggers := itemsTaggers(items, level)
|
||||
sp.taggers = append(eleTaggers, sp.taggers...)
|
||||
otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return otherTaggers, nil
|
||||
case *ast.SelectorExpr:
|
||||
otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return otherTaggers, nil
|
||||
case *ast.Ident:
|
||||
taggers := []tagParser{}
|
||||
if iftpe.Obj == nil {
|
||||
taggers = itemsTaggers(items, level)
|
||||
}
|
||||
otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return append(taggers, otherTaggers...), nil
|
||||
case *ast.StarExpr:
|
||||
otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return otherTaggers, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown field type ele for %q", nm)
|
||||
}
|
||||
}
|
||||
|
||||
// check if this is a primitive, if so parse the validations from the
|
||||
// doc comments of the slice declaration.
|
||||
if ftped, ok := fld.Type.(*ast.ArrayType); ok {
|
||||
taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sp.taggers = append(taggers, sp.taggers...)
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
sp.taggers = []tagParser{
|
||||
newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
|
||||
newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}),
|
||||
}
|
||||
}
|
||||
if err := sp.Parse(fld.Doc); err != nil {
|
||||
return err
|
||||
}
|
||||
if ps.In == "path" {
|
||||
ps.Required = true
|
||||
}
|
||||
|
||||
if ps.Name == "" {
|
||||
ps.Name = nm
|
||||
}
|
||||
|
||||
if nm != gnm {
|
||||
addExtension(&ps.VendorExtensible, "x-go-name", gnm)
|
||||
}
|
||||
pt[nm] = ps
|
||||
sequence = append(sequence, nm)
|
||||
}
|
||||
}
|
||||
|
||||
for _, k := range sequence {
|
||||
p := pt[k]
|
||||
for i, v := range operation.Parameters {
|
||||
if v.Name == k {
|
||||
operation.Parameters = append(operation.Parameters[:i], operation.Parameters[i+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
operation.Parameters = append(operation.Parameters, p)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func isAliasParam(prop swaggerTypable) bool {
|
||||
var isParam bool
|
||||
if param, ok := prop.(paramTypable); ok {
|
||||
isParam = param.param.In == "query" ||
|
||||
param.param.In == "path" ||
|
||||
param.param.In == "formData"
|
||||
}
|
||||
return isParam
|
||||
}
|
150  vendor/github.com/go-swagger/go-swagger/scan/path.go  (generated, vendored, new file)
@@ -0,0 +1,150 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
type parsedPathContent struct {
|
||||
Method, Path, ID string
|
||||
Tags []string
|
||||
Remaining *ast.CommentGroup
|
||||
}
|
||||
|
||||
func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt parsedPathContent) {
|
||||
var justMatched bool
|
||||
|
||||
for _, cmt := range lines {
|
||||
for _, line := range strings.Split(cmt.Text, "\n") {
|
||||
matches := annotation.FindStringSubmatch(line)
|
||||
if len(matches) > 3 {
|
||||
cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1]
|
||||
cnt.Tags = rxSpace.Split(matches[3], -1)
|
||||
if len(matches[3]) == 0 {
|
||||
cnt.Tags = nil
|
||||
}
|
||||
justMatched = true
|
||||
} else if cnt.Method != "" {
|
||||
if cnt.Remaining == nil {
|
||||
cnt.Remaining = new(ast.CommentGroup)
|
||||
}
|
||||
if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" {
|
||||
cc := new(ast.Comment)
|
||||
cc.Slash = cmt.Slash
|
||||
cc.Text = line
|
||||
cnt.Remaining.List = append(cnt.Remaining.List, cc)
|
||||
justMatched = false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
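// Illustrative only: given a comment group whose first line is, say,
//
//	swagger:route GET /users/{id} users getUserByID
//
// the fields end up as Method "GET", Path "/users/{id}", Tags ["users"] and
// ID "getUserByID" (the id is taken from the last capture group), while any further
// comment lines are collected into Remaining for later description and YAML parsing.
// The route shown is invented for the example.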
|
||||
|
||||
func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation {
|
||||
if op == nil {
|
||||
op = new(spec.Operation)
|
||||
op.ID = id
|
||||
}
|
||||
|
||||
switch strings.ToUpper(method) {
|
||||
case "GET":
|
||||
if pthObj.Get != nil {
|
||||
if id == pthObj.Get.ID {
|
||||
op = pthObj.Get
|
||||
} else {
|
||||
pthObj.Get = op
|
||||
}
|
||||
} else {
|
||||
pthObj.Get = op
|
||||
}
|
||||
|
||||
case "POST":
|
||||
if pthObj.Post != nil {
|
||||
if id == pthObj.Post.ID {
|
||||
op = pthObj.Post
|
||||
} else {
|
||||
pthObj.Post = op
|
||||
}
|
||||
} else {
|
||||
pthObj.Post = op
|
||||
}
|
||||
|
||||
case "PUT":
|
||||
if pthObj.Put != nil {
|
||||
if id == pthObj.Put.ID {
|
||||
op = pthObj.Put
|
||||
} else {
|
||||
pthObj.Put = op
|
||||
}
|
||||
} else {
|
||||
pthObj.Put = op
|
||||
}
|
||||
|
||||
case "PATCH":
|
||||
if pthObj.Patch != nil {
|
||||
if id == pthObj.Patch.ID {
|
||||
op = pthObj.Patch
|
||||
} else {
|
||||
pthObj.Patch = op
|
||||
}
|
||||
} else {
|
||||
pthObj.Patch = op
|
||||
}
|
||||
|
||||
case "HEAD":
|
||||
if pthObj.Head != nil {
|
||||
if id == pthObj.Head.ID {
|
||||
op = pthObj.Head
|
||||
} else {
|
||||
pthObj.Head = op
|
||||
}
|
||||
} else {
|
||||
pthObj.Head = op
|
||||
}
|
||||
|
||||
case "DELETE":
|
||||
if pthObj.Delete != nil {
|
||||
if id == pthObj.Delete.ID {
|
||||
op = pthObj.Delete
|
||||
} else {
|
||||
pthObj.Delete = op
|
||||
}
|
||||
} else {
|
||||
pthObj.Delete = op
|
||||
}
|
||||
|
||||
case "OPTIONS":
|
||||
if pthObj.Options != nil {
|
||||
if id == pthObj.Options.ID {
|
||||
op = pthObj.Options
|
||||
} else {
|
||||
pthObj.Options = op
|
||||
}
|
||||
} else {
|
||||
pthObj.Options = op
|
||||
}
|
||||
}
|
||||
|
||||
return op
|
||||
}
|
447  vendor/github.com/go-swagger/go-swagger/scan/responses.go  (generated, vendored, new file)
@@ -0,0 +1,447 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/loader"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
type responseTypable struct {
|
||||
in string
|
||||
header *spec.Header
|
||||
response *spec.Response
|
||||
}
|
||||
|
||||
func (ht responseTypable) Level() int { return 0 }
|
||||
|
||||
func (ht responseTypable) Typed(tpe, format string) {
|
||||
ht.header.Typed(tpe, format)
|
||||
}
|
||||
|
||||
func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) {
|
||||
if in == "body" {
|
||||
// get the schema for items on the schema property
|
||||
if schema == nil {
|
||||
schema = new(spec.Schema)
|
||||
}
|
||||
if schema.Items == nil {
|
||||
schema.Items = new(spec.SchemaOrArray)
|
||||
}
|
||||
if schema.Items.Schema == nil {
|
||||
schema.Items.Schema = new(spec.Schema)
|
||||
}
|
||||
schema.Typed("array", "")
|
||||
return schemaTypable{schema.Items.Schema, 0}, schema
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (ht responseTypable) Items() swaggerTypable {
|
||||
bdt, schema := bodyTypable(ht.in, ht.response.Schema)
|
||||
if bdt != nil {
|
||||
ht.response.Schema = schema
|
||||
return bdt
|
||||
}
|
||||
|
||||
if ht.header.Items == nil {
|
||||
ht.header.Items = new(spec.Items)
|
||||
}
|
||||
ht.header.Type = "array"
|
||||
return itemsTypable{ht.header.Items, 1}
|
||||
}
|
||||
|
||||
func (ht responseTypable) SetRef(ref spec.Ref) {
|
||||
// having trouble seeing the usefulness of this one here
|
||||
ht.Schema().Ref = ref
|
||||
}
|
||||
|
||||
func (ht responseTypable) Schema() *spec.Schema {
|
||||
if ht.response.Schema == nil {
|
||||
ht.response.Schema = new(spec.Schema)
|
||||
}
|
||||
return ht.response.Schema
|
||||
}
|
||||
|
||||
func (ht responseTypable) SetSchema(schema *spec.Schema) {
|
||||
ht.response.Schema = schema
|
||||
}
|
||||
func (ht responseTypable) CollectionOf(items *spec.Items, format string) {
|
||||
ht.header.CollectionOf(items, format)
|
||||
}
|
||||
|
||||
type headerValidations struct {
|
||||
current *spec.Header
|
||||
}
|
||||
|
||||
func (sv headerValidations) SetMaximum(val float64, exclusive bool) {
|
||||
sv.current.Maximum = &val
|
||||
sv.current.ExclusiveMaximum = exclusive
|
||||
}
|
||||
func (sv headerValidations) SetMinimum(val float64, exclusive bool) {
|
||||
sv.current.Minimum = &val
|
||||
sv.current.ExclusiveMinimum = exclusive
|
||||
}
|
||||
func (sv headerValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
|
||||
func (sv headerValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
|
||||
func (sv headerValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
|
||||
func (sv headerValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
|
||||
func (sv headerValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
|
||||
func (sv headerValidations) SetPattern(val string) { sv.current.Pattern = val }
|
||||
func (sv headerValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
|
||||
func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
|
||||
func (sv headerValidations) SetEnum(val string) {
|
||||
sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
|
||||
}
|
||||
func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val }
|
||||
func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val }
|
||||
|
||||
func newResponseDecl(file *ast.File, decl *ast.GenDecl, ts *ast.TypeSpec) responseDecl {
|
||||
var rd responseDecl
|
||||
rd.File = file
|
||||
rd.Decl = decl
|
||||
rd.TypeSpec = ts
|
||||
rd.inferNames()
|
||||
return rd
|
||||
}
|
||||
|
||||
type responseDecl struct {
|
||||
File *ast.File
|
||||
Decl *ast.GenDecl
|
||||
TypeSpec *ast.TypeSpec
|
||||
GoName string
|
||||
Name string
|
||||
annotated bool
|
||||
}
|
||||
|
||||
func (sd *responseDecl) hasAnnotation() bool {
|
||||
sd.inferNames()
|
||||
return sd.annotated
|
||||
}
|
||||
|
||||
func (sd *responseDecl) inferNames() (goName string, name string) {
|
||||
if sd.GoName != "" {
|
||||
goName, name = sd.GoName, sd.Name
|
||||
return
|
||||
}
|
||||
goName = sd.TypeSpec.Name.Name
|
||||
name = goName
|
||||
if sd.Decl.Doc != nil {
|
||||
DECLS:
|
||||
for _, cmt := range sd.Decl.Doc.List {
|
||||
for _, ln := range strings.Split(cmt.Text, "\n") {
|
||||
matches := rxResponseOverride.FindStringSubmatch(ln)
|
||||
if len(matches) > 0 {
|
||||
sd.annotated = true
|
||||
}
|
||||
if len(matches) > 1 && len(matches[1]) > 0 {
|
||||
name = matches[1]
|
||||
break DECLS
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
sd.GoName = goName
|
||||
sd.Name = name
|
||||
return
|
||||
}
|
||||
|
||||
func newResponseParser(prog *loader.Program) *responseParser {
|
||||
return &responseParser{prog, nil, newSchemaParser(prog)}
|
||||
}
|
||||
|
||||
type responseParser struct {
|
||||
program *loader.Program
|
||||
postDecls []schemaDecl
|
||||
scp *schemaParser
|
||||
}
|
||||
|
||||
func (rp *responseParser) Parse(gofile *ast.File, target interface{}) error {
|
||||
tgt := target.(map[string]spec.Response)
|
||||
for _, decl := range gofile.Decls {
|
||||
switch x1 := decl.(type) {
|
||||
// Check for parameters at the package level.
|
||||
case *ast.GenDecl:
|
||||
for _, spc := range x1.Specs {
|
||||
switch x2 := spc.(type) {
|
||||
case *ast.TypeSpec:
|
||||
sd := newResponseDecl(gofile, x1, x2)
|
||||
if sd.hasAnnotation() {
|
||||
if err := rp.parseDecl(tgt, sd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Check for parameters inside functions.
|
||||
case *ast.FuncDecl:
|
||||
for _, b := range x1.Body.List {
|
||||
switch x2 := b.(type) {
|
||||
case *ast.DeclStmt:
|
||||
switch x3 := x2.Decl.(type) {
|
||||
case *ast.GenDecl:
|
||||
for _, spc := range x3.Specs {
|
||||
switch x4 := spc.(type) {
|
||||
case *ast.TypeSpec:
|
||||
sd := newResponseDecl(gofile, x3, x4)
|
||||
if sd.hasAnnotation() {
|
||||
if err := rp.parseDecl(tgt, sd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rp *responseParser) parseDecl(responses map[string]spec.Response, decl responseDecl) error {
|
||||
// check if there is a swagger:response tag, optionally followed by a response name;
// that name (or the go type name) is the key under which this response is stored
// in the responses dictionary that got passed into this parse method.
|
||||
response := responses[decl.Name]
|
||||
resPtr := &response
|
||||
|
||||
// analyze doc comment for the model
|
||||
sp := new(sectionedParser)
|
||||
sp.setDescription = func(lines []string) { resPtr.Description = joinDropLast(lines) }
|
||||
if err := sp.Parse(decl.Decl.Doc); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// analyze struct body for fields etc
|
||||
// each exported struct field:
|
||||
// * gets a type mapped to a go primitive
|
||||
// * perhaps gets a format
|
||||
// * has to document the validations that apply for the type and the field
|
||||
// * when the struct field points to a model it becomes a ref: #/definitions/ModelName
|
||||
// * comments that aren't tags are used as the description
|
||||
if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok {
|
||||
if err := rp.parseStructType(decl.File, resPtr, tpe, make(map[string]struct{})); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
responses[decl.Name] = response
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rp *responseParser) parseEmbeddedStruct(gofile *ast.File, response *spec.Response, expr ast.Expr, seenPreviously map[string]struct{}) error {
|
||||
switch tpe := expr.(type) {
|
||||
case *ast.Ident:
|
||||
// do lookup of type
|
||||
// take primitives into account, they should result in an error for swagger
|
||||
pkg, err := rp.scp.packageForFile(gofile, tpe)
|
||||
if err != nil {
|
||||
return fmt.Errorf("embedded struct: %v", err)
|
||||
}
|
||||
file, _, ts, err := findSourceFile(pkg, tpe.Name)
|
||||
if err != nil {
|
||||
return fmt.Errorf("embedded struct: %v", err)
|
||||
}
|
||||
if st, ok := ts.Type.(*ast.StructType); ok {
|
||||
return rp.parseStructType(file, response, st, seenPreviously)
|
||||
}
|
||||
case *ast.SelectorExpr:
|
||||
// look up package, file and then type
|
||||
pkg, err := rp.scp.packageForSelector(gofile, tpe.X)
|
||||
if err != nil {
|
||||
return fmt.Errorf("embedded struct: %v", err)
|
||||
}
|
||||
file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
|
||||
if err != nil {
|
||||
return fmt.Errorf("embedded struct: %v", err)
|
||||
}
|
||||
if st, ok := ts.Type.(*ast.StructType); ok {
|
||||
return rp.parseStructType(file, response, st, seenPreviously)
|
||||
}
|
||||
case *ast.StarExpr:
|
||||
return rp.parseEmbeddedStruct(gofile, response, tpe.X, seenPreviously)
|
||||
}
|
||||
fmt.Printf("1%#v\n", expr)
|
||||
return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
|
||||
}
|
||||
|
||||
func (rp *responseParser) parseStructType(gofile *ast.File, response *spec.Response, tpe *ast.StructType, seenPreviously map[string]struct{}) error {
|
||||
if tpe.Fields != nil {
|
||||
|
||||
seenProperties := seenPreviously
|
||||
|
||||
for _, fld := range tpe.Fields.List {
|
||||
if len(fld.Names) == 0 {
|
||||
// when the embedded struct is annotated with swagger:allOf it will be used as allOf property
|
||||
// otherwise the fields will just be included as normal properties
|
||||
if err := rp.parseEmbeddedStruct(gofile, response, fld.Type, seenProperties); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, fld := range tpe.Fields.List {
|
||||
if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
|
||||
nm, ignore, _, err := parseJSONTag(fld)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ignore {
|
||||
continue
|
||||
}
|
||||
|
||||
var in string
|
||||
// scan for param location first, this changes some behavior down the line
|
||||
if fld.Doc != nil {
|
||||
for _, cmt := range fld.Doc.List {
|
||||
for _, line := range strings.Split(cmt.Text, "\n") {
|
||||
matches := rxIn.FindStringSubmatch(line)
|
||||
if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
|
||||
in = strings.TrimSpace(matches[1])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ps := response.Headers[nm]
|
||||
|
||||
// support swagger:file for response
|
||||
// An API operation can return a file, such as an image or PDF. In this case,
|
||||
// define the response schema with type: file and specify the appropriate MIME types in the produces section.
|
||||
if fld.Doc != nil && fileParam(fld.Doc) {
|
||||
response.Schema = &spec.Schema{}
|
||||
response.Schema.Typed("file", "")
|
||||
} else if err := rp.scp.parseNamedType(gofile, fld.Type, responseTypable{in, &ps, response}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if strfmtName, ok := strfmtName(fld.Doc); ok {
|
||||
ps.Typed("string", strfmtName)
|
||||
}
|
||||
|
||||
sp := new(sectionedParser)
|
||||
sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
|
||||
sp.taggers = []tagParser{
|
||||
newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}),
|
||||
newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}),
|
||||
newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
|
||||
newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}),
|
||||
newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
|
||||
newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}),
|
||||
newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
|
||||
newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}),
|
||||
newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
|
||||
newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}),
|
||||
newSingleLineTagParser("enum", &setEnum{headerValidations{&ps}, rxf(rxEnumFmt, "")}),
|
||||
newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxDefaultFmt, "")}),
|
||||
newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxExampleFmt, "")}),
|
||||
}
|
||||
itemsTaggers := func(items *spec.Items, level int) []tagParser {
|
||||
// the expression is 1-indexed, not 0-indexed
|
||||
itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
|
||||
|
||||
return []tagParser{
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
|
||||
newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
|
||||
}
|
||||
}
|
||||
|
||||
var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
|
||||
parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
|
||||
if items == nil {
|
||||
return []tagParser{}, nil
|
||||
}
|
||||
switch iftpe := expr.(type) {
|
||||
case *ast.ArrayType:
|
||||
eleTaggers := itemsTaggers(items, level)
|
||||
sp.taggers = append(eleTaggers, sp.taggers...)
|
||||
otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return otherTaggers, nil
|
||||
case *ast.Ident:
|
||||
taggers := []tagParser{}
|
||||
if iftpe.Obj == nil {
|
||||
taggers = itemsTaggers(items, level)
|
||||
}
|
||||
otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return append(taggers, otherTaggers...), nil
|
||||
case *ast.StarExpr:
|
||||
otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return otherTaggers, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown field type ele for %q", nm)
|
||||
}
|
||||
}
|
||||
// check if this is a primitive, if so parse the validations from the
|
||||
// doc comments of the slice declaration.
|
||||
if ftped, ok := fld.Type.(*ast.ArrayType); ok {
|
||||
taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sp.taggers = append(taggers, sp.taggers...)
|
||||
}
|
||||
|
||||
if err := sp.Parse(fld.Doc); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if in != "body" {
|
||||
seenProperties[nm] = struct{}{}
|
||||
if response.Headers == nil {
|
||||
response.Headers = make(map[string]spec.Header)
|
||||
}
|
||||
response.Headers[nm] = ps
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for k := range response.Headers {
|
||||
if _, ok := seenProperties[k]; !ok {
|
||||
delete(response.Headers, k)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
252 vendor/github.com/go-swagger/go-swagger/scan/route_params.go generated vendored Normal file
@@ -0,0 +1,252 @@
// +build !go1.11
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
const (
|
||||
// ParamDescriptionKey indicates the tag used to define a parameter description in swagger:route
|
||||
ParamDescriptionKey = "description"
|
||||
// ParamNameKey indicates the tag used to define a parameter name in swagger:route
|
||||
ParamNameKey = "name"
|
||||
// ParamInKey indicates the tag used to define a parameter location in swagger:route
|
||||
ParamInKey = "in"
|
||||
// ParamRequiredKey indicates the tag used to declare whether a parameter is required in swagger:route
|
||||
ParamRequiredKey = "required"
|
||||
// ParamTypeKey indicates the tag used to define the parameter type in swagger:route
|
||||
ParamTypeKey = "type"
|
||||
// ParamAllowEmptyKey indicates the tag used to indicate whether a parameter allows empty values in swagger:route
|
||||
ParamAllowEmptyKey = "allowempty"
|
||||
|
||||
// SchemaMinKey indicates the tag used to indicate the minimum value allowed for this type in swagger:route
|
||||
SchemaMinKey = "min"
|
||||
// SchemaMaxKey indicates the tag used to indicate the maximum value allowed for this type in swagger:route
|
||||
SchemaMaxKey = "max"
|
||||
// SchemaEnumKey indicates the tag used to specify the allowed values for this type in swagger:route
|
||||
SchemaEnumKey = "enum"
|
||||
// SchemaFormatKey indicates the expected format for this field in swagger:route
|
||||
SchemaFormatKey = "format"
|
||||
// SchemaDefaultKey indicates the default value for this field in swagger:route
|
||||
SchemaDefaultKey = "default"
|
||||
// SchemaMinLenKey indicates the minimum length of this field in swagger:route
|
||||
SchemaMinLenKey = "minlength"
|
||||
// SchemaMaxLenKey indicates the maximum length of this field in swagger:route
|
||||
SchemaMaxLenKey = "maxlength"
|
||||
|
||||
// TypeArray is the identifier for an array type in swagger:route
|
||||
TypeArray = "array"
|
||||
// TypeNumber is the identifier for a number type in swagger:route
|
||||
TypeNumber = "number"
|
||||
// TypeInteger is the identifier for an integer type in swagger:route
|
||||
TypeInteger = "integer"
|
||||
// TypeBoolean is the identifier for a boolean type in swagger:route
|
||||
TypeBoolean = "boolean"
|
||||
// TypeBool is the identifier for a boolean type in swagger:route
|
||||
TypeBool = "bool"
|
||||
// TypeObject is the identifier for an object type in swagger:route
|
||||
TypeObject = "object"
|
||||
// TypeString is the identifier for a string type in swagger:route
|
||||
TypeString = "string"
|
||||
)
|
||||
|
||||
var (
|
||||
validIn = []string{"path", "query", "header", "body", "form"}
|
||||
basicTypes = []string{TypeInteger, TypeNumber, TypeString, TypeBoolean, TypeBool, TypeArray}
|
||||
)
|
||||
|
||||
func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams {
|
||||
return &setOpParams{
|
||||
set: setter,
|
||||
parameters: params,
|
||||
}
|
||||
}
|
||||
|
||||
type setOpParams struct {
|
||||
set func([]*spec.Parameter)
|
||||
parameters []*spec.Parameter
|
||||
}
|
||||
|
||||
func (s *setOpParams) Matches(line string) bool {
|
||||
return rxParameters.MatchString(line)
|
||||
}
|
||||
|
||||
func (s *setOpParams) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
|
||||
var current *spec.Parameter
|
||||
var extraData map[string]string
|
||||
|
||||
for _, line := range lines {
|
||||
l := strings.TrimSpace(line)
|
||||
|
||||
if strings.HasPrefix(l, "+") {
|
||||
s.finalizeParam(current, extraData)
|
||||
current = new(spec.Parameter)
|
||||
extraData = make(map[string]string)
|
||||
l = strings.TrimPrefix(l, "+")
|
||||
}
|
||||
|
||||
kv := strings.SplitN(l, ":", 2)
|
||||
|
||||
if len(kv) <= 1 {
|
||||
continue
|
||||
}
|
||||
|
||||
key := strings.ToLower(strings.TrimSpace(kv[0]))
|
||||
value := strings.TrimSpace(kv[1])
|
||||
|
||||
if current == nil {
|
||||
return errors.New("invalid route/operation schema provided")
|
||||
}
|
||||
|
||||
switch key {
|
||||
case ParamDescriptionKey:
|
||||
current.Description = value
|
||||
case ParamNameKey:
|
||||
current.Name = value
|
||||
case ParamInKey:
|
||||
v := strings.ToLower(value)
|
||||
if contains(validIn, v) {
|
||||
current.In = v
|
||||
}
|
||||
case ParamRequiredKey:
|
||||
if v, err := strconv.ParseBool(value); err == nil {
|
||||
current.Required = v
|
||||
}
|
||||
case ParamTypeKey:
|
||||
if current.Schema == nil {
|
||||
current.Schema = new(spec.Schema)
|
||||
}
|
||||
if contains(basicTypes, value) {
|
||||
current.Type = strings.ToLower(value)
|
||||
if current.Type == TypeBool {
|
||||
current.Type = TypeBoolean
|
||||
}
|
||||
} else {
|
||||
if ref, err := spec.NewRef("#/definitions/" + value); err == nil {
|
||||
current.Type = TypeObject
|
||||
current.Schema.Ref = ref
|
||||
}
|
||||
}
|
||||
current.Schema.Type = spec.StringOrArray{current.Type}
|
||||
case ParamAllowEmptyKey:
|
||||
if v, err := strconv.ParseBool(value); err == nil {
|
||||
current.AllowEmptyValue = v
|
||||
}
|
||||
default:
|
||||
extraData[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
s.finalizeParam(current, extraData)
|
||||
s.set(s.parameters)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) {
|
||||
if param == nil {
|
||||
return
|
||||
}
|
||||
|
||||
processSchema(data, param)
|
||||
s.parameters = append(s.parameters, param)
|
||||
}
|
||||
|
||||
func processSchema(data map[string]string, param *spec.Parameter) {
|
||||
if param.Schema == nil {
|
||||
return
|
||||
}
|
||||
|
||||
var enumValues []string
|
||||
|
||||
for key, value := range data {
|
||||
switch key {
|
||||
case SchemaMinKey:
|
||||
if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
|
||||
v, _ := strconv.ParseFloat(value, 64)
|
||||
param.Schema.Minimum = &v
|
||||
}
|
||||
case SchemaMaxKey:
|
||||
if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
|
||||
v, _ := strconv.ParseFloat(value, 64)
|
||||
param.Schema.Maximum = &v
|
||||
}
|
||||
case SchemaMinLenKey:
|
||||
if getType(param.Schema) == TypeArray {
|
||||
v, _ := strconv.ParseInt(value, 10, 64)
|
||||
param.Schema.MinLength = &v
|
||||
}
|
||||
case SchemaMaxLenKey:
|
||||
if getType(param.Schema) == TypeArray {
|
||||
v, _ := strconv.ParseInt(value, 10, 64)
|
||||
param.Schema.MaxLength = &v
|
||||
}
|
||||
case SchemaEnumKey:
|
||||
enumValues = strings.Split(value, ",")
|
||||
case SchemaFormatKey:
|
||||
param.Schema.Format = value
|
||||
case SchemaDefaultKey:
|
||||
param.Schema.Default = convert(param.Type, value)
|
||||
}
|
||||
}
|
||||
|
||||
if param.Description != "" {
|
||||
param.Schema.Description = param.Description
|
||||
}
|
||||
|
||||
convertEnum(param.Schema, enumValues)
|
||||
}
|
||||
|
||||
func convertEnum(schema *spec.Schema, enumValues []string) {
|
||||
if len(enumValues) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
var finalEnum []interface{}
|
||||
for _, v := range enumValues {
|
||||
finalEnum = append(finalEnum, convert(schema.Type[0], strings.TrimSpace(v)))
|
||||
}
|
||||
schema.Enum = finalEnum
|
||||
}
|
||||
|
||||
func convert(typeStr, valueStr string) interface{} {
|
||||
switch typeStr {
|
||||
case TypeInteger:
|
||||
fallthrough
|
||||
case TypeNumber:
|
||||
if num, err := strconv.ParseFloat(valueStr, 64); err == nil {
|
||||
return num
|
||||
}
|
||||
case TypeBoolean:
|
||||
fallthrough
|
||||
case TypeBool:
|
||||
if b, err := strconv.ParseBool(valueStr); err == nil {
|
||||
return b
|
||||
}
|
||||
}
|
||||
return valueStr
|
||||
}
|
||||
|
||||
func getType(schema *spec.Schema) string {
|
||||
if len(schema.Type) == 0 {
|
||||
return ""
|
||||
}
|
||||
return schema.Type[0]
|
||||
}
|
||||
|
||||
func contains(arr []string, obj string) bool {
|
||||
for _, v := range arr {
|
||||
if v == obj {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
138 vendor/github.com/go-swagger/go-swagger/scan/routes.go generated vendored Normal file
@@ -0,0 +1,138 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
|
||||
"golang.org/x/tools/go/loader"
|
||||
)
|
||||
|
||||
func opConsumesSetter(op *spec.Operation) func([]string) {
|
||||
return func(consumes []string) { op.Consumes = consumes }
|
||||
}
|
||||
|
||||
func opProducesSetter(op *spec.Operation) func([]string) {
|
||||
return func(produces []string) { op.Produces = produces }
|
||||
}
|
||||
|
||||
func opSchemeSetter(op *spec.Operation) func([]string) {
|
||||
return func(schemes []string) { op.Schemes = schemes }
|
||||
}
|
||||
|
||||
func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) {
|
||||
return func(securityDefs []map[string][]string) { op.Security = securityDefs }
|
||||
}
|
||||
|
||||
func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) {
|
||||
return func(def *spec.Response, scr map[int]spec.Response) {
|
||||
if op.Responses == nil {
|
||||
op.Responses = new(spec.Responses)
|
||||
}
|
||||
op.Responses.Default = def
|
||||
op.Responses.StatusCodeResponses = scr
|
||||
}
|
||||
}
|
||||
|
||||
func opParamSetter(op *spec.Operation) func([]*spec.Parameter) {
|
||||
return func(params []*spec.Parameter) {
|
||||
for _, v := range params {
|
||||
op.AddParam(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func newRoutesParser(prog *loader.Program) *routesParser {
|
||||
return &routesParser{
|
||||
program: prog,
|
||||
}
|
||||
}
|
||||
|
||||
type routesParser struct {
|
||||
program *loader.Program
|
||||
definitions map[string]spec.Schema
|
||||
operations map[string]*spec.Operation
|
||||
responses map[string]spec.Response
|
||||
parameters []*spec.Parameter
|
||||
}
|
||||
|
||||
func (rp *routesParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error {
|
||||
tgt := target.(*spec.Paths)
|
||||
for _, comsec := range gofile.Comments {
|
||||
content := parsePathAnnotation(rxRoute, comsec.List)
|
||||
|
||||
if content.Method == "" {
|
||||
continue // it's not, next!
|
||||
}
|
||||
|
||||
if !shouldAcceptTag(content.Tags, includeTags, excludeTags) {
|
||||
if Debug {
|
||||
fmt.Printf("route %s %s is ignored due to tag rules\n", content.Method, content.Path)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
pthObj := tgt.Paths[content.Path]
|
||||
op := setPathOperation(
|
||||
content.Method, content.ID,
|
||||
&pthObj, rp.operations[content.ID])
|
||||
|
||||
op.Tags = content.Tags
|
||||
|
||||
sp := new(sectionedParser)
|
||||
sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
|
||||
sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
|
||||
sr := newSetResponses(rp.definitions, rp.responses, opResponsesSetter(op))
|
||||
spa := newSetParams(rp.parameters, opParamSetter(op))
|
||||
sp.taggers = []tagParser{
|
||||
newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false),
|
||||
newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false),
|
||||
newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))),
|
||||
newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false),
|
||||
newMultiLineTagParser("Parameters", spa, false),
|
||||
newMultiLineTagParser("Responses", sr, false),
|
||||
}
|
||||
if err := sp.Parse(content.Remaining); err != nil {
|
||||
return fmt.Errorf("operation (%s): %v", op.ID, err)
|
||||
}
|
||||
|
||||
if tgt.Paths == nil {
|
||||
tgt.Paths = make(map[string]spec.PathItem)
|
||||
}
|
||||
tgt.Paths[content.Path] = pthObj
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool {
|
||||
for _, tag := range tags {
|
||||
if len(includeTags) > 0 {
|
||||
if includeTags[tag] {
|
||||
return true
|
||||
}
|
||||
} else if len(excludeTags) > 0 {
|
||||
if excludeTags[tag] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return len(includeTags) <= 0
|
||||
}
951 vendor/github.com/go-swagger/go-swagger/scan/scanner.go generated vendored Normal file
@@ -0,0 +1,951 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/build"
|
||||
goparser "go/parser"
|
||||
"go/types"
|
||||
"log"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/loads/fmts"
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/go-openapi/swag"
|
||||
"golang.org/x/tools/go/loader"
|
||||
yaml "gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
const (
|
||||
rxMethod = "(\\p{L}+)"
|
||||
rxPath = "((?:/[\\p{L}\\p{N}\\p{Pd}\\p{Pc}{}\\-\\.\\?_~%!$&'()*+,;=:@/]*)+/?)"
|
||||
rxOpTags = "(\\p{L}[\\p{L}\\p{N}\\p{Pd}\\.\\p{Pc}\\p{Zs}]+)"
|
||||
rxOpID = "((?:\\p{L}[\\p{L}\\p{N}\\p{Pd}\\p{Pc}]+)+)"
|
||||
|
||||
rxMaximumFmt = "%s[Mm]ax(?:imum)?\\p{Zs}*:\\p{Zs}*([\\<=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
|
||||
rxMinimumFmt = "%s[Mm]in(?:imum)?\\p{Zs}*:\\p{Zs}*([\\>=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
|
||||
rxMultipleOfFmt = "%s[Mm]ultiple\\p{Zs}*[Oo]f\\p{Zs}*:\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
|
||||
|
||||
rxMaxLengthFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
|
||||
rxMinLengthFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
|
||||
rxPatternFmt = "%s[Pp]attern\\p{Zs}*:\\p{Zs}*(.*)$"
|
||||
rxCollectionFormatFmt = "%s[Cc]ollection(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ff]ormat)\\p{Zs}*:\\p{Zs}*(.*)$"
|
||||
rxEnumFmt = "%s[Ee]num\\p{Zs}*:\\p{Zs}*(.*)$"
|
||||
rxDefaultFmt = "%s[Dd]efault\\p{Zs}*:\\p{Zs}*(.*)$"
|
||||
rxExampleFmt = "%s[Ee]xample\\p{Zs}*:\\p{Zs}*(.*)$"
|
||||
|
||||
rxMaxItemsFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
|
||||
rxMinItemsFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
|
||||
rxUniqueFmt = "%s[Uu]nique\\p{Zs}*:\\p{Zs}*(true|false)$"
|
||||
|
||||
rxItemsPrefixFmt = "(?:[Ii]tems[\\.\\p{Zs}]*){%d}"
|
||||
)
|
||||
|
||||
var (
|
||||
rxSwaggerAnnotation = regexp.MustCompile(`swagger:([\p{L}\p{N}\p{Pd}\p{Pc}]+)`)
|
||||
rxFileUpload = regexp.MustCompile(`swagger:file`)
|
||||
rxStrFmt = regexp.MustCompile(`swagger:strfmt\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
|
||||
rxAlias = regexp.MustCompile(`swagger:alias`)
|
||||
rxName = regexp.MustCompile(`swagger:name\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)$`)
|
||||
rxAllOf = regexp.MustCompile(`swagger:allOf\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)?$`)
|
||||
rxModelOverride = regexp.MustCompile(`swagger:model\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
|
||||
rxResponseOverride = regexp.MustCompile(`swagger:response\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
|
||||
rxParametersOverride = regexp.MustCompile(`swagger:parameters\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\p{Zs}]+)$`)
|
||||
rxEnum = regexp.MustCompile(`swagger:enum\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
|
||||
rxIgnoreOverride = regexp.MustCompile(`swagger:ignore\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
|
||||
rxDefault = regexp.MustCompile(`swagger:default\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
|
||||
rxType = regexp.MustCompile(`swagger:type\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
|
||||
rxRoute = regexp.MustCompile(
|
||||
"swagger:route\\p{Zs}*" +
|
||||
rxMethod +
|
||||
"\\p{Zs}*" +
|
||||
rxPath +
|
||||
"(?:\\p{Zs}+" +
|
||||
rxOpTags +
|
||||
")?\\p{Zs}+" +
|
||||
rxOpID + "\\p{Zs}*$")
|
||||
rxBeginYAMLSpec = regexp.MustCompile(`---\p{Zs}*$`)
|
||||
rxUncommentHeaders = regexp.MustCompile(`^[\p{Zs}\t/\*-]*\|?`)
|
||||
rxUncommentYAML = regexp.MustCompile(`^[\p{Zs}\t]*/*`)
|
||||
rxOperation = regexp.MustCompile(
|
||||
"swagger:operation\\p{Zs}*" +
|
||||
rxMethod +
|
||||
"\\p{Zs}*" +
|
||||
rxPath +
|
||||
"(?:\\p{Zs}+" +
|
||||
rxOpTags +
|
||||
")?\\p{Zs}+" +
|
||||
rxOpID + "\\p{Zs}*$")
|
||||
|
||||
rxSpace = regexp.MustCompile(`\p{Zs}+`)
|
||||
rxIndent = regexp.MustCompile(`\p{Zs}*/*\p{Zs}*[^\p{Zs}]`)
|
||||
rxPunctuationEnd = regexp.MustCompile(`\p{Po}$`)
|
||||
rxStripComments = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`)
|
||||
rxStripTitleComments = regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`)
|
||||
rxAllowedExtensions = regexp.MustCompile(`^[Xx]-`)
|
||||
|
||||
rxIn = regexp.MustCompile(`[Ii]n\p{Zs}*:\p{Zs}*(query|path|header|body|formData)$`)
|
||||
rxRequired = regexp.MustCompile(`[Rr]equired\p{Zs}*:\p{Zs}*(true|false)$`)
|
||||
rxDiscriminator = regexp.MustCompile(`[Dd]iscriminator\p{Zs}*:\p{Zs}*(true|false)$`)
|
||||
rxReadOnly = regexp.MustCompile(`[Rr]ead(?:\p{Zs}*|[\p{Pd}\p{Pc}])?[Oo]nly\p{Zs}*:\p{Zs}*(true|false)$`)
|
||||
rxConsumes = regexp.MustCompile(`[Cc]onsumes\p{Zs}*:`)
|
||||
rxProduces = regexp.MustCompile(`[Pp]roduces\p{Zs}*:`)
|
||||
rxSecuritySchemes = regexp.MustCompile(`[Ss]ecurity\p{Zs}*:`)
|
||||
rxSecurity = regexp.MustCompile(`[Ss]ecurity\p{Zs}*[Dd]efinitions:`)
|
||||
rxResponses = regexp.MustCompile(`[Rr]esponses\p{Zs}*:`)
|
||||
rxParameters = regexp.MustCompile(`[Pp]arameters\p{Zs}*:`)
|
||||
rxSchemes = regexp.MustCompile(`[Ss]chemes\p{Zs}*:\p{Zs}*((?:(?:https?|HTTPS?|wss?|WSS?)[\p{Zs},]*)+)$`)
|
||||
rxVersion = regexp.MustCompile(`[Vv]ersion\p{Zs}*:\p{Zs}*(.+)$`)
|
||||
rxHost = regexp.MustCompile(`[Hh]ost\p{Zs}*:\p{Zs}*(.+)$`)
|
||||
rxBasePath = regexp.MustCompile(`[Bb]ase\p{Zs}*-*[Pp]ath\p{Zs}*:\p{Zs}*` + rxPath + "$")
|
||||
rxLicense = regexp.MustCompile(`[Ll]icense\p{Zs}*:\p{Zs}*(.+)$`)
|
||||
rxContact = regexp.MustCompile(`[Cc]ontact\p{Zs}*-?(?:[Ii]info\p{Zs}*)?:\p{Zs}*(.+)$`)
|
||||
rxTOS = regexp.MustCompile(`[Tt](:?erms)?\p{Zs}*-?[Oo]f?\p{Zs}*-?[Ss](?:ervice)?\p{Zs}*:`)
|
||||
rxExtensions = regexp.MustCompile(`[Ee]xtensions\p{Zs}*:`)
|
||||
rxInfoExtensions = regexp.MustCompile(`[In]nfo\p{Zs}*[Ee]xtensions:`)
|
||||
// currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`)
|
||||
)
|
||||
|
||||
// Many thanks go to https://github.com/yvasiyarov/swagger
|
||||
// this is loosely based on that implementation but for swagger 2.0
|
||||
|
||||
func joinDropLast(lines []string) string {
|
||||
l := len(lines)
|
||||
lns := lines
|
||||
if l > 0 && len(strings.TrimSpace(lines[l-1])) == 0 {
|
||||
lns = lines[:l-1]
|
||||
}
|
||||
return strings.Join(lns, "\n")
|
||||
}
|
||||
|
||||
func removeEmptyLines(lines []string) (notEmpty []string) {
|
||||
for _, l := range lines {
|
||||
if len(strings.TrimSpace(l)) > 0 {
|
||||
notEmpty = append(notEmpty, l)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func rxf(rxp, ar string) *regexp.Regexp {
|
||||
return regexp.MustCompile(fmt.Sprintf(rxp, ar))
|
||||
}
|
||||
|
||||
// The Opts for the application scanner.
|
||||
type Opts struct {
|
||||
BasePath string
|
||||
Input *spec.Swagger
|
||||
ScanModels bool
|
||||
BuildTags string
|
||||
Include []string
|
||||
Exclude []string
|
||||
IncludeTags []string
|
||||
ExcludeTags []string
|
||||
}
|
||||
|
||||
func safeConvert(str string) bool {
|
||||
b, err := swag.ConvertBool(str)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// Debug is true when process is run with DEBUG=1 env var
|
||||
var Debug = safeConvert(os.Getenv("DEBUG"))
|
||||
|
||||
// Application scans the application and builds a swagger spec based on the information from the code files.
|
||||
// When there are includes provided, only those files are considered for the initial discovery.
|
||||
// Similarly the excludes will exclude an item from initial discovery through scanning for annotations.
|
||||
// When something in the discovered items requires a type that is contained in the includes or excludes it will still be
|
||||
// in the spec.
|
||||
func Application(opts Opts) (*spec.Swagger, error) {
|
||||
parser, err := newAppScanner(&opts)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return parser.Parse()
|
||||
}
|
||||
|
||||
// appScanner the global context for scanning a go application
|
||||
// into a swagger specification
|
||||
type appScanner struct {
|
||||
loader *loader.Config
|
||||
prog *loader.Program
|
||||
classifier *programClassifier
|
||||
discovered []schemaDecl
|
||||
input *spec.Swagger
|
||||
definitions map[string]spec.Schema
|
||||
responses map[string]spec.Response
|
||||
operations map[string]*spec.Operation
|
||||
scanModels bool
|
||||
includeTags map[string]bool
|
||||
excludeTas map[string]bool
|
||||
|
||||
// MainPackage the path to find the main class in
|
||||
MainPackage string
|
||||
}
|
||||
|
||||
// newAppScanner creates a new api parser
|
||||
func newAppScanner(opts *Opts) (*appScanner, error) {
|
||||
if Debug {
|
||||
log.Println("scanning packages discovered through entrypoint @ ", opts.BasePath)
|
||||
}
|
||||
var ldr loader.Config
|
||||
ldr.ParserMode = goparser.ParseComments
|
||||
ldr.Import(opts.BasePath)
|
||||
if opts.BuildTags != "" {
|
||||
ldr.Build = &build.Default
|
||||
ldr.Build.BuildTags = strings.Split(opts.BuildTags, ",")
|
||||
}
|
||||
ldr.TypeChecker = types.Config{FakeImportC: true}
|
||||
prog, err := ldr.Load()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var includes, excludes packageFilters
|
||||
if len(opts.Include) > 0 {
|
||||
for _, include := range opts.Include {
|
||||
includes = append(includes, packageFilter{Name: include})
|
||||
}
|
||||
}
|
||||
if len(opts.Exclude) > 0 {
|
||||
for _, exclude := range opts.Exclude {
|
||||
excludes = append(excludes, packageFilter{Name: exclude})
|
||||
}
|
||||
}
|
||||
includeTags := make(map[string]bool)
|
||||
for _, includeTag := range opts.IncludeTags {
|
||||
includeTags[includeTag] = true
|
||||
}
|
||||
excludeTags := make(map[string]bool)
|
||||
for _, excludeTag := range opts.ExcludeTags {
|
||||
excludeTags[excludeTag] = true
|
||||
}
|
||||
|
||||
input := opts.Input
|
||||
if input == nil {
|
||||
input = new(spec.Swagger)
|
||||
input.Swagger = "2.0"
|
||||
}
|
||||
|
||||
if input.Paths == nil {
|
||||
input.Paths = new(spec.Paths)
|
||||
}
|
||||
if input.Definitions == nil {
|
||||
input.Definitions = make(map[string]spec.Schema)
|
||||
}
|
||||
if input.Responses == nil {
|
||||
input.Responses = make(map[string]spec.Response)
|
||||
}
|
||||
if input.Extensions == nil {
|
||||
input.Extensions = make(spec.Extensions)
|
||||
}
|
||||
|
||||
return &appScanner{
|
||||
MainPackage: opts.BasePath,
|
||||
prog: prog,
|
||||
input: input,
|
||||
loader: &ldr,
|
||||
operations: collectOperationsFromInput(input),
|
||||
definitions: input.Definitions,
|
||||
responses: input.Responses,
|
||||
scanModels: opts.ScanModels,
|
||||
classifier: &programClassifier{
|
||||
Includes: includes,
|
||||
Excludes: excludes,
|
||||
},
|
||||
includeTags: includeTags,
|
||||
excludeTas: excludeTags,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation {
|
||||
operations := make(map[string]*spec.Operation)
|
||||
if input != nil && input.Paths != nil {
|
||||
for _, pth := range input.Paths.Paths {
|
||||
if pth.Get != nil {
|
||||
operations[pth.Get.ID] = pth.Get
|
||||
}
|
||||
if pth.Post != nil {
|
||||
operations[pth.Post.ID] = pth.Post
|
||||
}
|
||||
if pth.Put != nil {
|
||||
operations[pth.Put.ID] = pth.Put
|
||||
}
|
||||
if pth.Patch != nil {
|
||||
operations[pth.Patch.ID] = pth.Patch
|
||||
}
|
||||
if pth.Delete != nil {
|
||||
operations[pth.Delete.ID] = pth.Delete
|
||||
}
|
||||
if pth.Head != nil {
|
||||
operations[pth.Head.ID] = pth.Head
|
||||
}
|
||||
if pth.Options != nil {
|
||||
operations[pth.Options.ID] = pth.Options
|
||||
}
|
||||
}
|
||||
}
|
||||
return operations
|
||||
}
|
||||
|
||||
// Parse produces a swagger object for an application
|
||||
func (a *appScanner) Parse() (*spec.Swagger, error) {
|
||||
// classification still includes files that are completely commented out
|
||||
cp, err := a.classifier.Classify(a.prog)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// build models dictionary
|
||||
if a.scanModels {
|
||||
for _, modelsFile := range cp.Models {
|
||||
if err := a.parseSchema(modelsFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// build parameters dictionary
|
||||
for _, paramsFile := range cp.Parameters {
|
||||
if err := a.parseParameters(paramsFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// build responses dictionary
|
||||
for _, responseFile := range cp.Responses {
|
||||
if err := a.parseResponses(responseFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// build definitions dictionary
|
||||
if err := a.processDiscovered(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// build paths dictionary
|
||||
for _, routeFile := range cp.Routes {
|
||||
if err := a.parseRoutes(routeFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
for _, operationFile := range cp.Operations {
|
||||
if err := a.parseOperations(operationFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// build swagger object
|
||||
for _, metaFile := range cp.Meta {
|
||||
if err := a.parseMeta(metaFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if a.input.Swagger == "" {
|
||||
a.input.Swagger = "2.0"
|
||||
}
|
||||
|
||||
return a.input, nil
|
||||
}
|
||||
|
||||
func (a *appScanner) processDiscovered() error {
|
||||
// loop over discovered until all the items are in definitions
|
||||
keepGoing := len(a.discovered) > 0
|
||||
for keepGoing {
|
||||
var queue []schemaDecl
|
||||
for _, d := range a.discovered {
|
||||
if _, ok := a.definitions[d.Name]; !ok {
|
||||
queue = append(queue, d)
|
||||
}
|
||||
}
|
||||
a.discovered = nil
|
||||
for _, sd := range queue {
|
||||
if err := a.parseDiscoveredSchema(sd); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
keepGoing = len(a.discovered) > 0
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *appScanner) parseSchema(file *ast.File) error {
|
||||
sp := newSchemaParser(a.prog)
|
||||
if err := sp.Parse(file, a.definitions); err != nil {
|
||||
return err
|
||||
}
|
||||
a.discovered = append(a.discovered, sp.postDecls...)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *appScanner) parseDiscoveredSchema(sd schemaDecl) error {
|
||||
sp := newSchemaParser(a.prog)
|
||||
sp.discovered = &sd
|
||||
|
||||
if err := sp.Parse(sd.File, a.definitions); err != nil {
|
||||
return err
|
||||
}
|
||||
a.discovered = append(a.discovered, sp.postDecls...)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *appScanner) parseRoutes(file *ast.File) error {
|
||||
rp := newRoutesParser(a.prog)
|
||||
rp.operations = a.operations
|
||||
rp.definitions = a.definitions
|
||||
rp.responses = a.responses
|
||||
|
||||
return rp.Parse(file, a.input.Paths, a.includeTags, a.excludeTas)
|
||||
}
|
||||
|
||||
func (a *appScanner) parseOperations(file *ast.File) error {
|
||||
op := newOperationsParser(a.prog)
|
||||
op.operations = a.operations
|
||||
op.definitions = a.definitions
|
||||
op.responses = a.responses
|
||||
return op.Parse(file, a.input.Paths, a.includeTags, a.excludeTas)
|
||||
}
|
||||
|
||||
func (a *appScanner) parseParameters(file *ast.File) error {
|
||||
rp := newParameterParser(a.prog)
|
||||
if err := rp.Parse(file, a.operations); err != nil {
|
||||
return err
|
||||
}
|
||||
a.discovered = append(a.discovered, rp.postDecls...)
|
||||
a.discovered = append(a.discovered, rp.scp.postDecls...)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *appScanner) parseResponses(file *ast.File) error {
|
||||
rp := newResponseParser(a.prog)
|
||||
if err := rp.Parse(file, a.responses); err != nil {
|
||||
return err
|
||||
}
|
||||
a.discovered = append(a.discovered, rp.postDecls...)
|
||||
a.discovered = append(a.discovered, rp.scp.postDecls...)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *appScanner) parseMeta(file *ast.File) error {
|
||||
return newMetaParser(a.input).Parse(file.Doc)
|
||||
}
|
||||
|
||||
// MustExpandPackagePath gets the real package path on disk
|
||||
func (a *appScanner) MustExpandPackagePath(packagePath string) string {
|
||||
pkgRealpath := swag.FindInGoSearchPath(packagePath)
|
||||
if pkgRealpath == "" {
|
||||
log.Fatalf("Can't find package %s \n", packagePath)
|
||||
}
|
||||
|
||||
return pkgRealpath
|
||||
}
|
||||
|
||||
type swaggerTypable interface {
|
||||
Typed(string, string)
|
||||
SetRef(spec.Ref)
|
||||
Items() swaggerTypable
|
||||
Schema() *spec.Schema
|
||||
Level() int
|
||||
}
|
||||
|
||||
// Map all Go builtin types that have Json representation to Swagger/Json types.
|
||||
// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/
|
||||
func swaggerSchemaForType(typeName string, prop swaggerTypable) error {
|
||||
switch typeName {
|
||||
case "bool":
|
||||
prop.Typed("boolean", "")
|
||||
case "byte":
|
||||
prop.Typed("integer", "uint8")
|
||||
case "complex128", "complex64":
|
||||
return fmt.Errorf("unsupported builtin %q (no JSON marshaller)", typeName)
|
||||
case "error":
|
||||
// TODO: error is often marshalled into a string but not always (e.g. errors package creates
|
||||
// errors that are marshalled into an empty object), this could be handled the same way
|
||||
// custom JSON marshallers are handled (in future)
|
||||
prop.Typed("string", "")
|
||||
case "float32":
|
||||
prop.Typed("number", "float")
|
||||
case "float64":
|
||||
prop.Typed("number", "double")
|
||||
case "int":
|
||||
prop.Typed("integer", "int64")
|
||||
case "int16":
|
||||
prop.Typed("integer", "int16")
|
||||
case "int32":
|
||||
prop.Typed("integer", "int32")
|
||||
case "int64":
|
||||
prop.Typed("integer", "int64")
|
||||
case "int8":
|
||||
prop.Typed("integer", "int8")
|
||||
case "rune":
|
||||
prop.Typed("integer", "int32")
|
||||
case "string":
|
||||
prop.Typed("string", "")
|
||||
case "uint":
|
||||
prop.Typed("integer", "uint64")
|
||||
case "uint16":
|
||||
prop.Typed("integer", "uint16")
|
||||
case "uint32":
|
||||
prop.Typed("integer", "uint32")
|
||||
case "uint64":
|
||||
prop.Typed("integer", "uint64")
|
||||
case "uint8":
|
||||
prop.Typed("integer", "uint8")
|
||||
case "uintptr":
|
||||
prop.Typed("integer", "uint64")
|
||||
default:
|
||||
return fmt.Errorf("unsupported type %q", typeName)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func newMultiLineTagParser(name string, parser valueParser, skipCleanUp bool) tagParser {
|
||||
return tagParser{
|
||||
Name: name,
|
||||
MultiLine: true,
|
||||
SkipCleanUp: skipCleanUp,
|
||||
Parser: parser,
|
||||
}
|
||||
}
|
||||
|
||||
func newSingleLineTagParser(name string, parser valueParser) tagParser {
|
||||
return tagParser{
|
||||
Name: name,
|
||||
MultiLine: false,
|
||||
SkipCleanUp: false,
|
||||
Parser: parser,
|
||||
}
|
||||
}
|
||||
|
||||
type tagParser struct {
|
||||
Name string
|
||||
MultiLine bool
|
||||
SkipCleanUp bool
|
||||
Lines []string
|
||||
Parser valueParser
|
||||
}
|
||||
|
||||
func (st *tagParser) Matches(line string) bool {
|
||||
return st.Parser.Matches(line)
|
||||
}
|
||||
|
||||
func (st *tagParser) Parse(lines []string) error {
|
||||
return st.Parser.Parse(lines)
|
||||
}
|
||||
|
||||
func newYamlParser(rx *regexp.Regexp, setter func(json.RawMessage) error) valueParser {
|
||||
return &yamlParser{
|
||||
set: setter,
|
||||
rx: rx,
|
||||
}
|
||||
}
|
||||
|
||||
type yamlParser struct {
|
||||
set func(json.RawMessage) error
|
||||
rx *regexp.Regexp
|
||||
}
|
||||
|
||||
func (y *yamlParser) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
|
||||
var uncommented []string
|
||||
uncommented = append(uncommented, removeYamlIndent(lines)...)
|
||||
|
||||
yamlContent := strings.Join(uncommented, "\n")
|
||||
var yamlValue interface{}
|
||||
err := yaml.Unmarshal([]byte(yamlContent), &yamlValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var jsonValue json.RawMessage
|
||||
jsonValue, err = fmts.YAMLToJSON(yamlValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return y.set(jsonValue)
|
||||
}
|
||||
|
||||
func (y *yamlParser) Matches(line string) bool {
|
||||
return y.rx.MatchString(line)
|
||||
}
|
||||
|
||||
// aggregates lines in header until it sees `---`,
|
||||
// the beginning of a YAML spec
|
||||
type yamlSpecScanner struct {
|
||||
header []string
|
||||
yamlSpec []string
|
||||
setTitle func([]string)
|
||||
setDescription func([]string)
|
||||
workedOutTitle bool
|
||||
title []string
|
||||
skipHeader bool
|
||||
}
|
||||
|
||||
func cleanupScannerLines(lines []string, ur *regexp.Regexp, yamlBlock *regexp.Regexp) []string {
|
||||
// bail early when there is nothing to parse
|
||||
if len(lines) == 0 {
|
||||
return lines
|
||||
}
|
||||
seenLine := -1
|
||||
var lastContent int
|
||||
var uncommented []string
|
||||
var startBlock bool
|
||||
var yaml []string
|
||||
for i, v := range lines {
|
||||
if yamlBlock != nil && yamlBlock.MatchString(v) && !startBlock {
|
||||
startBlock = true
|
||||
if seenLine < 0 {
|
||||
seenLine = i
|
||||
}
|
||||
continue
|
||||
}
|
||||
if startBlock {
|
||||
if yamlBlock.MatchString(v) {
|
||||
startBlock = false
|
||||
uncommented = append(uncommented, removeIndent(yaml)...)
|
||||
continue
|
||||
}
|
||||
yaml = append(yaml, v)
|
||||
if v != "" {
|
||||
if seenLine < 0 {
|
||||
seenLine = i
|
||||
}
|
||||
lastContent = i
|
||||
}
|
||||
continue
|
||||
}
|
||||
str := ur.ReplaceAllString(v, "")
|
||||
uncommented = append(uncommented, str)
|
||||
if str != "" {
|
||||
if seenLine < 0 {
|
||||
seenLine = i
|
||||
}
|
||||
lastContent = i
|
||||
}
|
||||
}
|
||||
|
||||
// fixes issue #50
|
||||
if seenLine == -1 {
|
||||
return nil
|
||||
}
|
||||
return uncommented[seenLine : lastContent+1]
|
||||
}
|
||||
|
||||
// a shared function that can be used to split given headers
|
||||
// into a title and description
|
||||
func collectScannerTitleDescription(headers []string) (title, desc []string) {
|
||||
hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil)
|
||||
|
||||
idx := -1
|
||||
for i, line := range hdrs {
|
||||
if strings.TrimSpace(line) == "" {
|
||||
idx = i
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if idx > -1 {
|
||||
title = hdrs[:idx]
|
||||
if len(hdrs) > idx+1 {
|
||||
desc = hdrs[idx+1:]
|
||||
} else {
|
||||
desc = nil
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if len(hdrs) > 0 {
|
||||
line := hdrs[0]
|
||||
if rxPunctuationEnd.MatchString(line) {
|
||||
title = []string{line}
|
||||
desc = hdrs[1:]
|
||||
} else {
|
||||
desc = hdrs
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (sp *yamlSpecScanner) collectTitleDescription() {
|
||||
if sp.workedOutTitle {
|
||||
return
|
||||
}
|
||||
if sp.setTitle == nil {
|
||||
sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders, nil)
|
||||
return
|
||||
}
|
||||
|
||||
sp.workedOutTitle = true
|
||||
sp.title, sp.header = collectScannerTitleDescription(sp.header)
|
||||
}
|
||||
|
||||
func (sp *yamlSpecScanner) Title() []string {
|
||||
sp.collectTitleDescription()
|
||||
return sp.title
|
||||
}
|
||||
|
||||
func (sp *yamlSpecScanner) Description() []string {
|
||||
sp.collectTitleDescription()
|
||||
return sp.header
|
||||
}
|
||||
|
||||
func (sp *yamlSpecScanner) Parse(doc *ast.CommentGroup) error {
|
||||
if doc == nil {
|
||||
return nil
|
||||
}
|
||||
var startedYAMLSpec bool
|
||||
COMMENTS:
|
||||
for _, c := range doc.List {
|
||||
for _, line := range strings.Split(c.Text, "\n") {
|
||||
if rxSwaggerAnnotation.MatchString(line) {
|
||||
break COMMENTS // a new swagger: annotation terminates this parser
|
||||
}
|
||||
|
||||
if !startedYAMLSpec {
|
||||
if rxBeginYAMLSpec.MatchString(line) {
|
||||
startedYAMLSpec = true
|
||||
sp.yamlSpec = append(sp.yamlSpec, line)
|
||||
continue
|
||||
}
|
||||
|
||||
if !sp.skipHeader {
|
||||
sp.header = append(sp.header, line)
|
||||
}
|
||||
|
||||
// no YAML spec yet, moving on
|
||||
continue
|
||||
}
|
||||
|
||||
sp.yamlSpec = append(sp.yamlSpec, line)
|
||||
}
|
||||
}
|
||||
if sp.setTitle != nil {
|
||||
sp.setTitle(sp.Title())
|
||||
}
|
||||
if sp.setDescription != nil {
|
||||
sp.setDescription(sp.Description())
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sp *yamlSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) {
|
||||
spec := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML, nil)
|
||||
if len(spec) == 0 {
|
||||
return errors.New("no spec available to unmarshal")
|
||||
}
|
||||
|
||||
if !strings.Contains(spec[0], "---") {
|
||||
return errors.New("yaml spec has to start with `---`")
|
||||
}
|
||||
|
||||
// remove indentation
|
||||
spec = removeIndent(spec)
|
||||
|
||||
// 1. parse yaml lines
|
||||
yamlValue := make(map[interface{}]interface{})
|
||||
|
||||
yamlContent := strings.Join(spec, "\n")
|
||||
err = yaml.Unmarshal([]byte(yamlContent), &yamlValue)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// 2. convert to json
|
||||
var jsonValue json.RawMessage
|
||||
jsonValue, err = fmts.YAMLToJSON(yamlValue)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// 3. unmarshal the json into an interface
|
||||
var data []byte
|
||||
data, err = jsonValue.MarshalJSON()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = u(data)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// all parsed, returning...
|
||||
sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines
|
||||
return
|
||||
}
|
||||
|
||||
// removes indent based on the first line
|
||||
func removeIndent(spec []string) []string {
|
||||
loc := rxIndent.FindStringIndex(spec[0])
|
||||
if loc[1] > 0 {
|
||||
for i := range spec {
|
||||
if len(spec[i]) >= loc[1] {
|
||||
spec[i] = spec[i][loc[1]-1:]
|
||||
}
|
||||
}
|
||||
}
|
||||
return spec
|
||||
}
|
||||
|
||||
// removes indent based on the first line
|
||||
func removeYamlIndent(spec []string) []string {
|
||||
loc := rxIndent.FindStringIndex(spec[0])
|
||||
var s []string
|
||||
if loc[1] > 0 {
|
||||
for i := range spec {
|
||||
if len(spec[i]) >= loc[1] {
|
||||
s = append(s, spec[i][loc[1]-1:])
|
||||
}
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// aggregates lines in header until it sees a tag.
|
||||
type sectionedParser struct {
|
||||
header []string
|
||||
matched map[string]tagParser
|
||||
annotation valueParser
|
||||
|
||||
seenTag bool
|
||||
skipHeader bool
|
||||
setTitle func([]string)
|
||||
setDescription func([]string)
|
||||
workedOutTitle bool
|
||||
taggers []tagParser
|
||||
currentTagger *tagParser
|
||||
title []string
|
||||
ignored bool
|
||||
}
|
||||
|
||||
func (st *sectionedParser) collectTitleDescription() {
|
||||
if st.workedOutTitle {
|
||||
return
|
||||
}
|
||||
if st.setTitle == nil {
|
||||
st.header = cleanupScannerLines(st.header, rxUncommentHeaders, nil)
|
||||
return
|
||||
}
|
||||
|
||||
st.workedOutTitle = true
|
||||
st.title, st.header = collectScannerTitleDescription(st.header)
|
||||
}
|
||||
|
||||
func (st *sectionedParser) Title() []string {
|
||||
st.collectTitleDescription()
|
||||
return st.title
|
||||
}
|
||||
|
||||
func (st *sectionedParser) Description() []string {
|
||||
st.collectTitleDescription()
|
||||
return st.header
|
||||
}
|
||||
|
||||
func (st *sectionedParser) Parse(doc *ast.CommentGroup) error {
|
||||
if doc == nil {
|
||||
return nil
|
||||
}
|
||||
COMMENTS:
|
||||
for _, c := range doc.List {
|
||||
for _, line := range strings.Split(c.Text, "\n") {
|
||||
if rxSwaggerAnnotation.MatchString(line) {
|
||||
if rxIgnoreOverride.MatchString(line) {
|
||||
st.ignored = true
|
||||
break COMMENTS // an explicit ignore terminates this parser
|
||||
}
|
||||
if st.annotation == nil || !st.annotation.Matches(line) {
|
||||
break COMMENTS // a new swagger: annotation terminates this parser
|
||||
}
|
||||
|
||||
_ = st.annotation.Parse([]string{line})
|
||||
if len(st.header) > 0 {
|
||||
st.seenTag = true
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
var matched bool
|
||||
for _, tagger := range st.taggers {
|
||||
if tagger.Matches(line) {
|
||||
st.seenTag = true
|
||||
st.currentTagger = &tagger
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if st.currentTagger == nil {
|
||||
if !st.skipHeader && !st.seenTag {
|
||||
st.header = append(st.header, line)
|
||||
}
|
||||
// didn't match a tag, moving on
|
||||
continue
|
||||
}
|
||||
|
||||
if st.currentTagger.MultiLine && matched {
|
||||
// the first line of a multiline tagger doesn't count
|
||||
continue
|
||||
}
|
||||
|
||||
ts, ok := st.matched[st.currentTagger.Name]
|
||||
if !ok {
|
||||
ts = *st.currentTagger
|
||||
}
|
||||
ts.Lines = append(ts.Lines, line)
|
||||
if st.matched == nil {
|
||||
st.matched = make(map[string]tagParser)
|
||||
}
|
||||
st.matched[st.currentTagger.Name] = ts
|
||||
|
||||
if !st.currentTagger.MultiLine {
|
||||
st.currentTagger = nil
|
||||
}
|
||||
}
|
||||
}
|
||||
if st.setTitle != nil {
|
||||
st.setTitle(st.Title())
|
||||
}
|
||||
if st.setDescription != nil {
|
||||
st.setDescription(st.Description())
|
||||
}
|
||||
for _, mt := range st.matched {
|
||||
if !mt.SkipCleanUp {
|
||||
mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders, nil)
|
||||
}
|
||||
if err := mt.Parse(mt.Lines); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
1336 vendor/github.com/go-swagger/go-swagger/scan/schema.go generated vendored Normal file
File diff suppressed because it is too large
828 vendor/github.com/go-swagger/go-swagger/scan/validators.go generated vendored Normal file
@@ -0,0 +1,828 @@
// +build !go1.11
|
||||
|
||||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package scan
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
type validationBuilder interface {
|
||||
SetMaximum(float64, bool)
|
||||
SetMinimum(float64, bool)
|
||||
SetMultipleOf(float64)
|
||||
|
||||
SetMinItems(int64)
|
||||
SetMaxItems(int64)
|
||||
|
||||
SetMinLength(int64)
|
||||
SetMaxLength(int64)
|
||||
SetPattern(string)
|
||||
|
||||
SetUnique(bool)
|
||||
SetEnum(string)
|
||||
SetDefault(interface{})
|
||||
SetExample(interface{})
|
||||
}
|
||||
|
||||
type valueParser interface {
|
||||
Parse([]string) error
|
||||
Matches(string) bool
|
||||
}
|
||||
|
||||
type setMaximum struct {
|
||||
builder validationBuilder
|
||||
rx *regexp.Regexp
|
||||
}
|
||||
|
||||
func (sm *setMaximum) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
matches := sm.rx.FindStringSubmatch(lines[0])
|
||||
if len(matches) > 2 && len(matches[2]) > 0 {
|
||||
max, err := strconv.ParseFloat(matches[2], 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sm.builder.SetMaximum(max, matches[1] == "<")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sm *setMaximum) Matches(line string) bool {
|
||||
return sm.rx.MatchString(line)
|
||||
}
|
||||
|
||||
type setMinimum struct {
|
||||
builder validationBuilder
|
||||
rx *regexp.Regexp
|
||||
}
|
||||
|
||||
func (sm *setMinimum) Matches(line string) bool {
|
||||
return sm.rx.MatchString(line)
|
||||
}
|
||||
|
||||
func (sm *setMinimum) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
matches := sm.rx.FindStringSubmatch(lines[0])
|
||||
if len(matches) > 2 && len(matches[2]) > 0 {
|
||||
min, err := strconv.ParseFloat(matches[2], 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sm.builder.SetMinimum(min, matches[1] == ">")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type setMultipleOf struct {
|
||||
builder validationBuilder
|
||||
rx *regexp.Regexp
|
||||
}
|
||||
|
||||
func (sm *setMultipleOf) Matches(line string) bool {
|
||||
return sm.rx.MatchString(line)
|
||||
}
|
||||
|
||||
func (sm *setMultipleOf) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
matches := sm.rx.FindStringSubmatch(lines[0])
|
||||
if len(matches) > 2 && len(matches[1]) > 0 {
|
||||
multipleOf, err := strconv.ParseFloat(matches[1], 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sm.builder.SetMultipleOf(multipleOf)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type setMaxItems struct {
|
||||
builder validationBuilder
|
||||
rx *regexp.Regexp
|
||||
}
|
||||
|
||||
func (sm *setMaxItems) Matches(line string) bool {
|
||||
return sm.rx.MatchString(line)
|
||||
}
|
||||
|
||||
func (sm *setMaxItems) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
matches := sm.rx.FindStringSubmatch(lines[0])
|
||||
if len(matches) > 1 && len(matches[1]) > 0 {
|
||||
maxItems, err := strconv.ParseInt(matches[1], 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sm.builder.SetMaxItems(maxItems)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type setMinItems struct {
|
||||
builder validationBuilder
|
||||
rx *regexp.Regexp
|
||||
}
|
||||
|
||||
func (sm *setMinItems) Matches(line string) bool {
|
||||
return sm.rx.MatchString(line)
|
||||
}
|
||||
|
||||
func (sm *setMinItems) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
matches := sm.rx.FindStringSubmatch(lines[0])
|
||||
if len(matches) > 1 && len(matches[1]) > 0 {
|
||||
minItems, err := strconv.ParseInt(matches[1], 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sm.builder.SetMinItems(minItems)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type setMaxLength struct {
|
||||
builder validationBuilder
|
||||
rx *regexp.Regexp
|
||||
}
|
||||
|
||||
func (sm *setMaxLength) Parse(lines []string) error {
|
||||
if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
|
||||
return nil
|
||||
}
|
||||
matches := sm.rx.FindStringSubmatch(lines[0])
|
||||
if len(matches) > 1 && len(matches[1]) > 0 {
|
||||
maxLength, err := strconv.ParseInt(matches[1], 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sm.builder.SetMaxLength(maxLength)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sm *setMaxLength) Matches(line string) bool {
|
||||
return sm.rx.MatchString(line)
|
||||
}
|
||||
|
||||
type setMinLength struct {
	builder validationBuilder
	rx      *regexp.Regexp
}

func (sm *setMinLength) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := sm.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		minLength, err := strconv.ParseInt(matches[1], 10, 64)
		if err != nil {
			return err
		}
		sm.builder.SetMinLength(minLength)
	}
	return nil
}

func (sm *setMinLength) Matches(line string) bool {
	return sm.rx.MatchString(line)
}

type setPattern struct {
	builder validationBuilder
	rx      *regexp.Regexp
}

func (sm *setPattern) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := sm.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		sm.builder.SetPattern(matches[1])
	}
	return nil
}

func (sm *setPattern) Matches(line string) bool {
	return sm.rx.MatchString(line)
}

type setCollectionFormat struct {
	builder operationValidationBuilder
	rx      *regexp.Regexp
}

func (sm *setCollectionFormat) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := sm.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		sm.builder.SetCollectionFormat(matches[1])
	}
	return nil
}

func (sm *setCollectionFormat) Matches(line string) bool {
	return sm.rx.MatchString(line)
}

type setUnique struct {
	builder validationBuilder
	rx      *regexp.Regexp
}

func (su *setUnique) Matches(line string) bool {
	return su.rx.MatchString(line)
}

func (su *setUnique) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := su.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		req, err := strconv.ParseBool(matches[1])
		if err != nil {
			return err
		}
		su.builder.SetUnique(req)
	}
	return nil
}

type setEnum struct {
	builder validationBuilder
	rx      *regexp.Regexp
}

func (se *setEnum) Matches(line string) bool {
	return se.rx.MatchString(line)
}

func (se *setEnum) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := se.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		se.builder.SetEnum(matches[1])
	}
	return nil
}

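// parseValueFromSchema converts a raw annotation string into a Go value that
// matches the declared simple-schema type: integers via strconv.Atoi, booleans
// via strconv.ParseBool, numbers via strconv.ParseFloat, and objects/arrays via
// json.Unmarshal. A nil schema, an unknown type, or malformed JSON falls back
// to returning the raw string. Illustrative only (assuming TypeName() resolves
// to the bare type name when no format is set):
//
//	parseValueFromSchema("42", &spec.SimpleSchema{Type: "integer"})      // 42, nil
//	parseValueFromSchema(`["a","b"]`, &spec.SimpleSchema{Type: "array"}) // []interface{}{"a", "b"}, nil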
func parseValueFromSchema(s string, schema *spec.SimpleSchema) (interface{}, error) {
	if schema != nil {
		switch strings.Trim(schema.TypeName(), "\"") {
		case "integer", "int", "int64", "int32", "int16":
			return strconv.Atoi(s)
		case "bool", "boolean":
			return strconv.ParseBool(s)
		case "number", "float64", "float32":
			return strconv.ParseFloat(s, 64)
		case "object":
			var obj map[string]interface{}
			if err := json.Unmarshal([]byte(s), &obj); err != nil {
				// If we can't parse it, just return the string.
				return s, nil
			}
			return obj, nil
		case "array":
			var slice []interface{}
			if err := json.Unmarshal([]byte(s), &slice); err != nil {
				// If we can't parse it, just return the string.
				return s, nil
			}
			return slice, nil
		default:
			return s, nil
		}
	} else {
		return s, nil
	}
}

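// setDefault captures a default value, converts it with parseValueFromSchema
// using the parameter's simple schema, and applies it via SetDefault.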
type setDefault struct {
	scheme  *spec.SimpleSchema
	builder validationBuilder
	rx      *regexp.Regexp
}

func (sd *setDefault) Matches(line string) bool {
	return sd.rx.MatchString(line)
}

func (sd *setDefault) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := sd.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		d, err := parseValueFromSchema(matches[1], sd.scheme)
		if err != nil {
			return err
		}
		sd.builder.SetDefault(d)
	}
	return nil
}

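// setExample works like setDefault, but applies the converted value via SetExample.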
type setExample struct {
	scheme  *spec.SimpleSchema
	builder validationBuilder
	rx      *regexp.Regexp
}

func (se *setExample) Matches(line string) bool {
	return se.rx.MatchString(line)
}

func (se *setExample) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := se.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		d, err := parseValueFromSchema(matches[1], se.scheme)
		if err != nil {
			return err
		}
		se.builder.SetExample(d)
	}
	return nil
}

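// matchOnlyParam recognizes a parameter annotation line but ignores its
// content: Parse is a no-op, so the matched line is consumed without
// producing any metadata.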
type matchOnlyParam struct {
	tgt *spec.Parameter
	rx  *regexp.Regexp
}

func (mo *matchOnlyParam) Matches(line string) bool {
	return mo.rx.MatchString(line)
}

func (mo *matchOnlyParam) Parse(lines []string) error {
	return nil
}

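// setRequiredParam sets the Required flag on the target parameter from the
// boolean captured by rxRequired.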
type setRequiredParam struct {
	tgt *spec.Parameter
}

func (su *setRequiredParam) Matches(line string) bool {
	return rxRequired.MatchString(line)
}

func (su *setRequiredParam) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := rxRequired.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		req, err := strconv.ParseBool(matches[1])
		if err != nil {
			return err
		}
		su.tgt.Required = req
	}
	return nil
}

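// setReadOnlySchema sets the ReadOnly flag on the target schema from the
// boolean captured by rxReadOnly.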
type setReadOnlySchema struct {
	tgt *spec.Schema
}

func (su *setReadOnlySchema) Matches(line string) bool {
	return rxReadOnly.MatchString(line)
}

func (su *setReadOnlySchema) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := rxReadOnly.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		req, err := strconv.ParseBool(matches[1])
		if err != nil {
			return err
		}
		su.tgt.ReadOnly = req
	}
	return nil
}

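// setDiscriminator marks the annotated field as the schema's discriminator
// when the captured boolean is true, and clears it again when it is false.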
type setDiscriminator struct {
	schema *spec.Schema
	field  string
}

func (su *setDiscriminator) Matches(line string) bool {
	return rxDiscriminator.MatchString(line)
}

func (su *setDiscriminator) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := rxDiscriminator.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		req, err := strconv.ParseBool(matches[1])
		if err != nil {
			return err
		}
		if req {
			su.schema.Discriminator = su.field
		} else {
			if su.schema.Discriminator == su.field {
				su.schema.Discriminator = ""
			}
		}
	}
	return nil
}

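// setRequiredSchema adds the annotated field to the schema's Required list
// when the captured boolean is true, and removes it when it is false.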
type setRequiredSchema struct {
	schema *spec.Schema
	field  string
}

func (su *setRequiredSchema) Matches(line string) bool {
	return rxRequired.MatchString(line)
}

func (su *setRequiredSchema) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := rxRequired.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		req, err := strconv.ParseBool(matches[1])
		if err != nil {
			return err
		}
		midx := -1
		for i, nm := range su.schema.Required {
			if nm == su.field {
				midx = i
				break
			}
		}
		if req {
			if midx < 0 {
				su.schema.Required = append(su.schema.Required, su.field)
			}
		} else if midx >= 0 {
			su.schema.Required = append(su.schema.Required[:midx], su.schema.Required[midx+1:]...)
		}
	}
	return nil
}

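// multiLineDropEmptyParser forwards all matched lines, with empty lines
// removed, to its set callback.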
func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multiLineDropEmptyParser {
	return &multiLineDropEmptyParser{
		rx:  rx,
		set: set,
	}
}

type multiLineDropEmptyParser struct {
	set func([]string)
	rx  *regexp.Regexp
}

func (m *multiLineDropEmptyParser) Matches(line string) bool {
	return m.rx.MatchString(line)
}

func (m *multiLineDropEmptyParser) Parse(lines []string) error {
	m.set(removeEmptyLines(lines))
	return nil
}

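// setSchemes splits the captured value on ", ", trims each entry, and passes
// the resulting scheme list to its set callback.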
func newSetSchemes(set func([]string)) *setSchemes {
	return &setSchemes{
		set: set,
		rx:  rxSchemes,
	}
}

type setSchemes struct {
	set func([]string)
	rx  *regexp.Regexp
}

func (ss *setSchemes) Matches(line string) bool {
	return ss.rx.MatchString(line)
}

func (ss *setSchemes) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}
	matches := ss.rx.FindStringSubmatch(lines[0])
	if len(matches) > 1 && len(matches[1]) > 0 {
		sch := strings.Split(matches[1], ", ")

		var schemes []string
		for _, s := range sch {
			ts := strings.TrimSpace(s)
			if ts != "" {
				schemes = append(schemes, ts)
			}
		}
		ss.set(schemes)
	}
	return nil
}

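// setSecurity parses security requirement lines of the form "name: scope1, scope2"
// into a []map[string][]string. Lines without a colon are skipped, scopes are
// comma separated, and only the first word of each scope is kept. For example
// (input/output of Parse below, illustrative):
//
//	"oauth2: read, write"  ->  map[string][]string{"oauth2": {"read", "write"}}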
func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *setSecurity {
	return &setSecurity{
		set: setter,
		rx:  rx,
	}
}

type setSecurity struct {
	set func([]map[string][]string)
	rx  *regexp.Regexp
}

func (ss *setSecurity) Matches(line string) bool {
	return ss.rx.MatchString(line)
}

func (ss *setSecurity) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}

	var result []map[string][]string
	for _, line := range lines {
		kv := strings.SplitN(line, ":", 2)
		scopes := []string{}
		var key string

		if len(kv) > 1 {
			scs := strings.Split(kv[1], ",")
			for _, scope := range scs {
				tr := strings.TrimSpace(scope)
				if tr != "" {
					tr = strings.SplitAfter(tr, " ")[0]
					scopes = append(scopes, strings.TrimSpace(tr))
				}
			}

			key = strings.TrimSpace(kv[0])

			result = append(result, map[string][]string{key: scopes})
		}
	}
	ss.set(result)
	return nil
}

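// setOpResponses collects the responses documented on an operation. Reference
// targets are resolved against the known responses and definitions supplied to
// newSetResponses, and the result is reported through the setter callback as a
// default response plus a map keyed by status code.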
func newSetResponses(definitions map[string]spec.Schema, responses map[string]spec.Response, setter func(*spec.Response, map[int]spec.Response)) *setOpResponses {
	return &setOpResponses{
		set:         setter,
		rx:          rxResponses,
		definitions: definitions,
		responses:   responses,
	}
}

type setOpResponses struct {
	set         func(*spec.Response, map[int]spec.Response)
	rx          *regexp.Regexp
	definitions map[string]spec.Schema
	responses   map[string]spec.Response
}

func (ss *setOpResponses) Matches(line string) bool {
	return ss.rx.MatchString(line)
}

// ResponseTag is used when a response points to a defined swagger:response.
const ResponseTag = "response"

// BodyTag is used when a response points to a model/schema.
const BodyTag = "body"

// DescriptionTag is used when a response carries a free-form description.
const DescriptionTag = "description"

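// parseTags splits a response value into space-separated "tag:value" pairs.
// The first pair may use the body or response tag to name the referenced model
// or response (an untagged first value defaults to response), and leading "[]"
// prefixes on the value are counted as nested array levels. A description tag
// consumes the remainder of the line. For example (illustrative):
//
//	"body:[]User description:list of users"
//	  -> modelOrResponse "User", arrays 1, isDefinitionRef true, description "list of users"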
func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) {
	tags := strings.Split(line, " ")
	parsedModelOrResponse := false

	for i, tagAndValue := range tags {
		tagValList := strings.SplitN(tagAndValue, ":", 2)
		var tag, value string
		if len(tagValList) > 1 {
			tag = tagValList[0]
			value = tagValList[1]
		} else {
			// TODO: print a warning and, in the long term, stop supporting untagged values.
			// For now, add a default tag if none is supplied.
			if i == 0 {
				tag = ResponseTag
			} else {
				tag = DescriptionTag
			}
			value = tagValList[0]
		}

		foundModelOrResponse := false
		if !parsedModelOrResponse {
			if tag == BodyTag {
				foundModelOrResponse = true
				isDefinitionRef = true
			}
			if tag == ResponseTag {
				foundModelOrResponse = true
				isDefinitionRef = false
			}
		}
		if foundModelOrResponse {
			// Read the model or response tag.
			parsedModelOrResponse = true
			// Check for nested arrays.
			arrays = 0
			for strings.HasPrefix(value, "[]") {
				arrays++
				value = value[2:]
			}
			// What's left over is the model name.
			modelOrResponse = value
		} else {
			foundDescription := false
			if tag == DescriptionTag {
				foundDescription = true
			}
			if foundDescription {
				// Descriptions are special: they consume the rest of the line.
				descriptionWords := []string{value}
				if i < len(tags)-1 {
					descriptionWords = append(descriptionWords, tags[i+1:]...)
				}
				description = strings.Join(descriptionWords, " ")
				break
			} else {
				if tag == ResponseTag || tag == BodyTag || tag == DescriptionTag {
					err = fmt.Errorf("Found valid tag %s, but not in a valid position", tag)
				} else {
					err = fmt.Errorf("Found invalid tag: %s", tag)
				}
				// Return the error.
				return
			}
		}
	}

	// TODO: maybe return an error if !parsedModelOrResponse.
	return
}

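// Parse reads response lines of the form "<status>: <tags>", where <status> is
// a numeric status code or "default" and <tags> is interpreted by parseTags.
// A line with an empty value yields an empty response for that status. The
// default response and the status-code keyed responses are passed to the
// configured setter.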
func (ss *setOpResponses) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}

	var def *spec.Response
	var scr map[int]spec.Response

	for _, line := range lines {
		kv := strings.SplitN(line, ":", 2)
		var key, value string

		if len(kv) > 1 {
			key = strings.TrimSpace(kv[0])
			if key == "" {
				// this must be some weird empty line
				continue
			}
			value = strings.TrimSpace(kv[1])
			if value == "" {
				var resp spec.Response
				if strings.EqualFold("default", key) {
					if def == nil {
						def = &resp
					}
				} else {
					if sc, err := strconv.Atoi(key); err == nil {
						if scr == nil {
							scr = make(map[int]spec.Response)
						}
						scr[sc] = resp
					}
				}
				continue
			}
			refTarget, arrays, isDefinitionRef, description, err := parseTags(value)
			if err != nil {
				return err
			}
			// If the target is not a known response but is a known definition,
			// treat it as a definition reference.
			if _, ok := ss.responses[refTarget]; !ok {
				if _, ok := ss.definitions[refTarget]; ok {
					isDefinitionRef = true
				}
			}

			var ref spec.Ref
			if isDefinitionRef {
				if description == "" {
					description = refTarget
				}
				ref, err = spec.NewRef("#/definitions/" + refTarget)
			} else {
				ref, err = spec.NewRef("#/responses/" + refTarget)
			}
			if err != nil {
				return err
			}

			// The description is used in any case.
			resp := spec.Response{ResponseProps: spec.ResponseProps{Description: description}}

			if isDefinitionRef {
				resp.Schema = new(spec.Schema)
				resp.Description = description
				if arrays == 0 {
					resp.Schema.Ref = ref
				} else {
					cs := resp.Schema
					for i := 0; i < arrays; i++ {
						cs.Typed("array", "")
						cs.Items = new(spec.SchemaOrArray)
						cs.Items.Schema = new(spec.Schema)
						cs = cs.Items.Schema
					}
					cs.Ref = ref
				}
				// The ref can be empty when only a description tag is used.
			} else if len(refTarget) > 0 {
				resp.Ref = ref
			}

			if strings.EqualFold("default", key) {
				if def == nil {
					def = &resp
				}
			} else {
				if sc, err := strconv.Atoi(key); err == nil {
					if scr == nil {
						scr = make(map[int]spec.Response)
					}
					scr[sc] = resp
				}
			}
		}
	}
	ss.set(def, scr)
	return nil
}

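// parseEnum splits a comma-separated enum annotation and converts each entry
// with parseValueFromSchema; entries that fail to convert are kept as raw strings.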
func parseEnum(val string, s *spec.SimpleSchema) []interface{} {
	list := strings.Split(val, ",")
	interfaceSlice := make([]interface{}, len(list))
	for i, d := range list {
		v, err := parseValueFromSchema(d, s)
		if err != nil {
			interfaceSlice[i] = d
			continue
		}

		interfaceSlice[i] = v
	}
	return interfaceSlice
}