Added validation for structs (#19)

parent 373bbd2202
commit 0c544fe355

299 changed files with 135 additions and 64237 deletions
@@ -220,12 +220,12 @@ Teams are global, i.e. one team can manage multiple namespaces.
 * [x] General search endpoints
 * [ ] We still need a good idea for how to get the lists a user only has direct access to (without a namespace)
   * Best to create a pseudo-namespace for that (id -1 or something) which then holds all of them
-* [ ] Validation of incoming structs, ideally with https://github.com/go-validator/validator or with the one from echo
+* [x] Validation of incoming structs, ideally with https://github.com/go-validator/validator or with the one from echo
 * [x] Pagination
   * How many items are shown per page should be definable in the config; the CRUD methods then pass a "give me page X" to the CRUDable functions, which have to evaluate it. There is unfortunately no other way: first fetching 2342352 entries and only limiting them to 200 afterwards is a massive waste of resources.
 * [ ] Testing with locust: https://locust.io/
 * [ ] Add a method to get a new valid token using an existing valid one
-* [ ] Test whether a swagger spec could somehow be generated via echo's routes method
+* [x] Test whether a swagger spec could somehow be generated via echo's routes method -> different swagger library
 * [ ] CalDAV
   * [x] Basics
   * [x] Reminders
@@ -1,11 +1,11 @@
 # Get all lists
-GET http://localhost:8080/api/v1/lists?page=0
+GET http://localhost:8080/api/v1/lists
 Authorization: Bearer {{auth_token}}
 
 ###
 
 # Get one list
-GET http://localhost:8080/api/v1/lists/2
+GET http://localhost:8080/api/v1/lists/1
 Authorization: Bearer {{auth_token}}
 
 ###

@@ -15,7 +15,10 @@ PUT http://localhost:8080/api/v1/namespaces/1/lists
 Authorization: Bearer {{auth_token}}
 Content-Type: application/json
 
-{}
+{
+    "title": "sc",
+    "created": 0
+}
 
 ###
@@ -15,6 +15,7 @@ This document describes the different errors Vikunja can return.
 | 1011 | 412 | Wrong username or password. |
 | 1012 | 412 | Email address of the user not confirmed. |
 | 2001 | 400 | ID cannot be empty or 0. |
+| 2002 | 400 | Some of the request data was invalid. The response contains an additional array with all invalid fields. |
 | 3001 | 404 | The list does not exist. |
 | 3004 | 403 | The user needs to have read permissions on that list to perform that action. |
 | 3005 | 400 | The list title cannot be empty. |
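For reference, a response carrying error 2002 pairs the generic error body with the new `invalid_fields` array. The sketch below only illustrates that shape: the `code` and `message` field names are assumptions, while `invalid_fields` matches the struct tag added later in this commit.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// validationError is illustrative only; it mirrors what a 2002 response body
// could look like. The json names "code" and "message" are assumptions.
type validationError struct {
	Code          int      `json:"code"`
	Message       string   `json:"message"`
	InvalidFields []string `json:"invalid_fields"`
}

func main() {
	body, _ := json.MarshalIndent(validationError{
		Code:          2002,
		Message:       "Invalid Data",
		InvalidFields: []string{"Title: non zero value required"},
	}, "", "  ")
	fmt.Println(string(body))
}
```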
go.mod (5 changes)

@@ -4,6 +4,7 @@ require (
 	cloud.google.com/go v0.30.0 // indirect
 	github.com/BurntSushi/toml v0.3.1 // indirect
 	github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc
+	github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf
 	github.com/client9/misspell v0.3.4
 	github.com/denisenkom/go-mssqldb v0.0.0-20180901172138-1eb28afdf9b6 // indirect
 	github.com/dgrijalva/jwt-go v3.2.0+incompatible
@@ -19,8 +20,10 @@ require (
 	github.com/go-openapi/strfmt v0.17.2 // indirect
 	github.com/go-openapi/swag v0.17.2 // indirect
 	github.com/go-openapi/validate v0.17.2 // indirect
+	github.com/go-playground/locales v0.12.1 // indirect
+	github.com/go-playground/universal-translator v0.16.0 // indirect
 	github.com/go-sql-driver/mysql v0.0.0-20171007150158-ee359f95877b
-	github.com/go-swagger/go-swagger v0.17.2
+	github.com/go-swagger/go-swagger v0.17.2 // indirect
 	github.com/go-xorm/builder v0.0.0-20170519032130-c8871c857d25 // indirect
 	github.com/go-xorm/core v0.5.8
 	github.com/go-xorm/tests v0.5.6 // indirect
go.sum (6 changes)

@@ -63,6 +63,10 @@ github.com/go-openapi/swag v0.17.2/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/
 github.com/go-openapi/validate v0.17.0/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+MYsct2VUrAJ4=
 github.com/go-openapi/validate v0.17.2 h1:lwFfiS4sv5DvOrsYDsYq4N7UU8ghXiYtPJ+VcQnC3Xg=
 github.com/go-openapi/validate v0.17.2/go.mod h1:Uh4HdOzKt19xGIGm1qHf/ofbX1YQ4Y+MYsct2VUrAJ4=
+github.com/go-playground/locales v0.12.1 h1:2FITxuFt/xuCNP1Acdhv62OzaCiviiE4kotfhkmOqEc=
+github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM=
+github.com/go-playground/universal-translator v0.16.0 h1:X++omBR/4cE2MNg91AoC3rmGrCjJ8eAeUP/K/EKx4DM=
+github.com/go-playground/universal-translator v0.16.0/go.mod h1:1AnU7NaIRDWWzGEKwgtJRd2xk99HeFyHw3yid4rvQIY=
 github.com/go-sql-driver/mysql v0.0.0-20171007150158-ee359f95877b h1:/CMGgAYard7jx9+bI7tUIqafFDR7Pv2BRu2Tb5dDaqM=
 github.com/go-sql-driver/mysql v0.0.0-20171007150158-ee359f95877b/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
 github.com/go-swagger/go-swagger v0.17.2 h1:eizwRyO8THHMA4kXyM5Z1UTPslZGE8VsfJC0jJqsRI8=
@@ -193,6 +197,8 @@ gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc h1:2gG
 gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/go-playground/validator.v9 v9.23.0 h1:oq297iqu7qsywIbeW5DBUTtV1nV750Y4q+H8MnDh0Yc=
+gopkg.in/go-playground/validator.v9 v9.23.0/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ=
 gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE=
 gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw=
 gopkg.in/testfixtures.v2 v2.4.5 h1:mnfYPBNoJnis+4crs6UzC4lv4GjTVoLXE9B/tW802q0=
@@ -267,6 +267,43 @@ func (err ErrIDCannotBeZero) HTTPError() HTTPError {
 	return HTTPError{HTTPCode: http.StatusBadRequest, Code: ErrCodeIDCannotBeZero, Message: "The ID cannot be empty or 0."}
 }
 
+// ErrInvalidData represents an "ErrInvalidData" kind of error. Used when a struct is invalid -> validation failed.
+type ErrInvalidData struct {
+	Message string
+}
+
+// IsErrInvalidData checks if an error is an ErrInvalidData.
+func IsErrInvalidData(err error) bool {
+	_, ok := err.(ErrInvalidData)
+	return ok
+}
+
+func (err ErrInvalidData) Error() string {
+	return fmt.Sprintf("Struct is invalid. %s", err.Message)
+}
+
+// ErrCodeInvalidData holds the unique world-error code of this error
+const ErrCodeInvalidData = 2002
+
+// HTTPError holds the http error description
+func (err ErrInvalidData) HTTPError() HTTPError {
+	return HTTPError{HTTPCode: http.StatusBadRequest, Code: ErrCodeInvalidData, Message: err.Message}
+}
+
+// ValidationHTTPError is the http error when a validation fails
+type ValidationHTTPError struct {
+	HTTPError
+	InvalidFields []string `json:"invalid_fields"`
+}
+
+// Error implements the Error type (so we can return it as type error)
+func (err ValidationHTTPError) Error() string {
+	theErr := ErrInvalidData{
+		Message: err.Message,
+	}
+	return theErr.Error()
+}
+
 // ===========
 // List errors
 // ===========
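The new error type follows the same pattern as the other model errors: a concrete struct plus an IsErr... helper so callers can branch on the error kind without string matching. A minimal sketch of that calling pattern, with the two pieces copied locally so it compiles on its own (the saveList function is made up):

```go
package main

import "fmt"

// Local copies of the pieces added above, only so this sketch is self-contained.
type ErrInvalidData struct {
	Message string
}

func (err ErrInvalidData) Error() string {
	return fmt.Sprintf("Struct is invalid. %s", err.Message)
}

// IsErrInvalidData checks if an error is an ErrInvalidData.
func IsErrInvalidData(err error) bool {
	_, ok := err.(ErrInvalidData)
	return ok
}

// saveList is a hypothetical operation that fails validation.
func saveList() error {
	return ErrInvalidData{Message: "Title: non zero value required"}
}

func main() {
	if err := saveList(); err != nil {
		if IsErrInvalidData(err) {
			// Validation failures map to a 400, everything else to a 500.
			fmt.Println("400 Bad Request:", err)
			return
		}
		fmt.Println("500 Internal Server Error:", err)
	}
}
```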
@@ -5,16 +5,16 @@ import "sort"
 // List represents a list of tasks
 type List struct {
 	ID          int64  `xorm:"int(11) autoincr not null unique pk" json:"id" param:"list"`
-	Title       string `xorm:"varchar(250)" json:"title"`
-	Description string `xorm:"varchar(1000)" json:"description"`
+	Title       string `xorm:"varchar(250)" json:"title" valid:"required,runelength(5|250)"`
+	Description string `xorm:"varchar(1000)" json:"description" valid:"runelength(0|1000)"`
 	OwnerID     int64  `xorm:"int(11) INDEX" json:"-"`
 	NamespaceID int64  `xorm:"int(11) INDEX" json:"-" param:"namespace"`
 
 	Owner User        `xorm:"-" json:"owner"`
 	Tasks []*ListTask `xorm:"-" json:"tasks"`
 
-	Created int64 `xorm:"created" json:"created"`
-	Updated int64 `xorm:"updated" json:"updated"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CRUDable `xorm:"-" json:"-"`
 	Rights   `xorm:"-" json:"-"`
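The valid:"..." tags above come from govalidator, the library added to go.mod earlier in this commit: `required` rejects zero values, `runelength(min|max)` bounds a string's length in runes, and `range(0|0)` only accepts 0, which keeps clients from setting the timestamp fields themselves. A small self-contained sketch of how these tags behave, using a made-up struct that copies the List tags:

```go
package main

import (
	"fmt"

	"github.com/asaskevich/govalidator"
)

// listInput mirrors the validation tags on the List model above;
// the struct itself is hypothetical and exists only for this sketch.
type listInput struct {
	Title       string `valid:"required,runelength(5|250)"`
	Description string `valid:"runelength(0|1000)"`
	Created     int64  `valid:"range(0|0)"`
}

func main() {
	// "sc" is shorter than 5 runes and Created is non-zero, so both fields fail.
	in := listInput{Title: "sc", Created: 1540000000}
	if _, err := govalidator.ValidateStruct(in); err != nil {
		for field, msg := range govalidator.ErrorsByField(err) {
			fmt.Printf("%s: %s\n", field, msg)
		}
	}
}
```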
@@ -3,15 +3,16 @@ package models
 // ListTask represents an task in a todolist
 type ListTask struct {
 	ID           int64  `xorm:"int(11) autoincr not null unique pk" json:"id" param:"listtask"`
-	Text         string `xorm:"varchar(250)" json:"text"`
-	Description  string `xorm:"varchar(250)" json:"description"`
+	Text         string `xorm:"varchar(250)" json:"text" valid:"required,runelength(5|250)"`
+	Description  string `xorm:"varchar(250)" json:"description" valid:"runelength(0|250)"`
 	Done         bool   `xorm:"INDEX" json:"done"`
 	DueDateUnix  int64  `xorm:"int(11) INDEX" json:"dueDate"`
 	ReminderUnix int64  `xorm:"int(11) INDEX" json:"reminderDate"`
 	CreatedByID  int64  `xorm:"int(11)" json:"-"` // ID of the user who put that task on the list
-	ListID       int64  `xorm:"int(11) INDEX" json:"listID" param:"list"`
-	Created      int64  `xorm:"created" json:"created"`
-	Updated      int64  `xorm:"updated" json:"updated"`
+	ListID       int64  `xorm:"int(11) INDEX" json:"listID" param:"list" valid:"required"`
+
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CreatedBy User `xorm:"-" json:"createdBy"`
@@ -5,10 +5,10 @@ type ListUser struct {
 	ID     int64     `xorm:"int(11) autoincr not null unique pk" json:"id" param:"namespace"`
 	UserID int64     `xorm:"int(11) not null INDEX" json:"user_id" param:"user"`
 	ListID int64     `xorm:"int(11) not null INDEX" json:"list_id" param:"list"`
-	Right  UserRight `xorm:"int(11) INDEX" json:"right"`
+	Right  UserRight `xorm:"int(11) INDEX" json:"right" valid:"length(0|2)"`
 
-	Created int64 `xorm:"created" json:"created"`
-	Updated int64 `xorm:"updated" json:"updated"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CRUDable `xorm:"-" json:"-"`
 	Rights   `xorm:"-" json:"-"`
@@ -3,14 +3,14 @@ package models
 // Namespace holds informations about a namespace
 type Namespace struct {
 	ID          int64  `xorm:"int(11) autoincr not null unique pk" json:"id" param:"namespace"`
-	Name        string `xorm:"varchar(250)" json:"name"`
-	Description string `xorm:"varchar(1000)" json:"description"`
+	Name        string `xorm:"varchar(250)" json:"name" valid:"required,runelength(5|250)"`
+	Description string `xorm:"varchar(1000)" json:"description" valid:"runelength(0|250)"`
 	OwnerID     int64  `xorm:"int(11) not null INDEX" json:"-"`
 
 	Owner User `xorm:"-" json:"owner"`
 
-	Created int64 `xorm:"created" json:"created"`
-	Updated int64 `xorm:"updated" json:"updated"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CRUDable `xorm:"-" json:"-"`
 	Rights   `xorm:"-" json:"-"`
@@ -5,10 +5,10 @@ type NamespaceUser struct {
 	ID          int64     `xorm:"int(11) autoincr not null unique pk" json:"id" param:"namespace"`
 	UserID      int64     `xorm:"int(11) not null INDEX" json:"user_id" param:"user"`
 	NamespaceID int64     `xorm:"int(11) not null INDEX" json:"namespace_id" param:"namespace"`
-	Right       UserRight `xorm:"int(11) INDEX" json:"right"`
+	Right       UserRight `xorm:"int(11) INDEX" json:"right" valid:"length(0|2)"`
 
-	Created int64 `xorm:"created" json:"created"`
-	Updated int64 `xorm:"updated" json:"updated"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CRUDable `xorm:"-" json:"-"`
 	Rights   `xorm:"-" json:"-"`
@@ -5,10 +5,10 @@ type TeamList struct {
 	ID     int64     `xorm:"int(11) autoincr not null unique pk" json:"id"`
 	TeamID int64     `xorm:"int(11) not null INDEX" json:"team_id" param:"team"`
 	ListID int64     `xorm:"int(11) not null INDEX" json:"list_id" param:"list"`
-	Right  TeamRight `xorm:"int(11) INDEX" json:"right"`
+	Right  TeamRight `xorm:"int(11) INDEX" json:"right" valid:"length(0|2)"`
 
-	Created int64 `xorm:"created" json:"created"`
-	Updated int64 `xorm:"updated" json:"updated"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CRUDable `xorm:"-" json:"-"`
 	Rights   `xorm:"-" json:"-"`
@@ -5,10 +5,10 @@ type TeamNamespace struct {
 	ID          int64     `xorm:"int(11) autoincr not null unique pk" json:"id"`
 	TeamID      int64     `xorm:"int(11) not null INDEX" json:"team_id" param:"team"`
 	NamespaceID int64     `xorm:"int(11) not null INDEX" json:"namespace_id" param:"namespace"`
-	Right       TeamRight `xorm:"int(11) INDEX" json:"right"`
+	Right       TeamRight `xorm:"int(11) INDEX" json:"right" valid:"length(0|2)"`
 
-	Created int64 `xorm:"created" json:"created"`
-	Updated int64 `xorm:"updated" json:"updated"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CRUDable `xorm:"-" json:"-"`
 	Rights   `xorm:"-" json:"-"`
@@ -3,15 +3,15 @@ package models
 // Team holds a team object
 type Team struct {
 	ID          int64  `xorm:"int(11) autoincr not null unique pk" json:"id" param:"team"`
-	Name        string `xorm:"varchar(250) not null" json:"name"`
-	Description string `xorm:"varchar(250)" json:"description"`
+	Name        string `xorm:"varchar(250) not null" json:"name" valid:"required,runelength(5|250)"`
+	Description string `xorm:"varchar(250)" json:"description" valid:"runelength(0|250)"`
 	CreatedByID int64  `xorm:"int(11) not null INDEX" json:"-"`
 
 	CreatedBy User        `xorm:"-" json:"created_by"`
 	Members   []*TeamUser `xorm:"-" json:"members"`
 
-	Created int64 `xorm:"created" json:"created"`
-	Updated int64 `xorm:"updated" json:"updated"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CRUDable `xorm:"-" json:"-"`
 	Rights   `xorm:"-" json:"-"`
@@ -42,8 +42,8 @@ type TeamMember struct {
 	UserID int64 `xorm:"int(11) not null INDEX" json:"user_id" param:"user"`
 	Admin  bool  `xorm:"tinyint(1) INDEX" json:"admin"`
 
-	Created int64 `xorm:"created" json:"created"`
-	Updated int64 `xorm:"updated" json:"updated"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 
 	CRUDable `xorm:"-" json:"-"`
 	Rights   `xorm:"-" json:"-"`
@@ -15,16 +15,16 @@ type UserLogin struct {
 // User holds information about an user
 type User struct {
 	ID       int64  `xorm:"int(11) autoincr not null unique pk" json:"id"`
-	Username string `xorm:"varchar(250) not null unique" json:"username"`
+	Username string `xorm:"varchar(250) not null unique" json:"username" valid:"length(5|250)"`
 	Password string `xorm:"varchar(250) not null" json:"-"`
-	Email    string `xorm:"varchar(250)" json:"email"`
+	Email    string `xorm:"varchar(250)" json:"email" valid:"email,length(0|250)"`
 	IsActive bool   `json:"-"`
 
 	PasswordResetToken string `xorm:"varchar(450)" json:"-"`
 	EmailConfirmToken  string `xorm:"varchar(450)" json:"-"`
 
-	Created int64 `xorm:"created" json:"-"`
-	Updated int64 `xorm:"updated" json:"-"`
+	Created int64 `xorm:"created" json:"created" valid:"range(0|0)"`
+	Updated int64 `xorm:"updated" json:"updated" valid:"range(0|0)"`
 }
 
 // TableName returns the table name for users
@@ -17,6 +17,11 @@ func (c *WebHandler) CreateWeb(ctx echo.Context) error {
 		return echo.NewHTTPError(http.StatusBadRequest, "No or invalid model provided.")
 	}
 
+	// Validate the struct
+	if err := ctx.Validate(currentStruct); err != nil {
+		return echo.NewHTTPError(http.StatusBadRequest, err)
+	}
+
 	// Get the user to pass for later checks
 	currentUser, err := models.GetCurrentUser(ctx)
 	if err != nil {
@@ -18,6 +18,11 @@ func (c *WebHandler) UpdateWeb(ctx echo.Context) error {
 		return echo.NewHTTPError(http.StatusBadRequest, "No or invalid model provided.")
 	}
 
+	// Validate the struct
+	if err := ctx.Validate(currentStruct); err != nil {
+		return echo.NewHTTPError(http.StatusBadRequest, err)
+	}
+
 	// Check if the user has the right to do that
 	currentUser, err := models.GetCurrentUser(ctx)
 	if err != nil {
@@ -11,17 +11,42 @@
 package routes
 
 import (
-	"code.vikunja.io/api/pkg/models"
-	"github.com/labstack/echo"
-	"github.com/labstack/echo/middleware"
-	"github.com/swaggo/echo-swagger"
-
 	_ "code.vikunja.io/api/docs" // To generate swagger docs
+	"code.vikunja.io/api/pkg/models"
 	apiv1 "code.vikunja.io/api/pkg/routes/api/v1"
 	"code.vikunja.io/api/pkg/routes/crud"
+	"github.com/asaskevich/govalidator"
+	"github.com/labstack/echo"
+	"github.com/labstack/echo/middleware"
 	"github.com/spf13/viper"
+	"github.com/swaggo/echo-swagger"
 )
 
+// CustomValidator is a dummy struct to use govalidator with echo
+type CustomValidator struct{}
+
+// Validate validates stuff
+func (cv *CustomValidator) Validate(i interface{}) error {
+	if _, err := govalidator.ValidateStruct(i); err != nil {
+
+		var errs []string
+		for field, e := range govalidator.ErrorsByField(err) {
+			errs = append(errs, field+": "+e)
+		}
+
+		httperr := models.ValidationHTTPError{
+			models.HTTPError{
+				Code:    models.ErrCodeInvalidData,
+				Message: "Invalid Data",
+			},
+			errs,
+		}
+
+		return httperr
+	}
+	return nil
+}
+
 // NewEcho registers a new Echo instance
 func NewEcho() *echo.Echo {
 	e := echo.New()
@@ -33,6 +58,9 @@ func NewEcho() *echo.Echo {
 		Format: "${time_rfc3339_nano}: ${remote_ip} ${method} ${status} ${uri} ${latency_human} - ${user_agent}\n",
 	}))
 
+	// Validation
+	e.Validator = &CustomValidator{}
+
 	return e
 }
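Putting the pieces together: echo calls whatever is assigned to e.Validator when a handler invokes ctx.Validate(...), which is how the CreateWeb/UpdateWeb changes above reach govalidator. A condensed, self-contained sketch of that wiring follows; the route, payload struct, and handler are made up, and this simplified validator returns the raw error instead of building a ValidationHTTPError.

```go
package main

import (
	"net/http"

	"github.com/asaskevich/govalidator"
	"github.com/labstack/echo"
)

// customValidator plugs govalidator into echo, like the CustomValidator above,
// but returns the raw validation error for brevity.
type customValidator struct{}

func (cv *customValidator) Validate(i interface{}) error {
	if _, err := govalidator.ValidateStruct(i); err != nil {
		return err
	}
	return nil
}

// list is a stand-in payload using the same tags as the List model.
type list struct {
	Title string `json:"title" valid:"required,runelength(5|250)"`
}

func main() {
	e := echo.New()
	e.Validator = &customValidator{}

	// PUT /lists mimics the bind -> validate order used in CreateWeb/UpdateWeb.
	e.PUT("/lists", func(c echo.Context) error {
		l := new(list)
		if err := c.Bind(l); err != nil {
			return echo.NewHTTPError(http.StatusBadRequest, "No or invalid model provided.")
		}
		if err := c.Validate(l); err != nil {
			return echo.NewHTTPError(http.StatusBadRequest, err)
		}
		return c.JSON(http.StatusCreated, l)
	})

	e.Logger.Fatal(e.Start(":8080"))
}
```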
vendor/github.com/globalsign/mgo/LICENSE (generated, vendored; 25 deletions)

@@ -1,25 +0,0 @@
-mgo - MongoDB driver for Go
-
-Copyright (c) 2010-2013 - Gustavo Niemeyer <gustavo@niemeyer.net>
-
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
vendor/github.com/globalsign/mgo/bson/LICENSE (generated, vendored; 25 deletions)

@@ -1,25 +0,0 @@
-BSON library for Go
-
-Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
-
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
vendor/github.com/globalsign/mgo/bson/README.md (generated, vendored; 12 deletions)

@@ -1,12 +0,0 @@
-[![GoDoc](https://godoc.org/github.com/globalsign/mgo/bson?status.svg)](https://godoc.org/github.com/globalsign/mgo/bson)
-
-An Implementation of BSON for Go
---------------------------------
-
-Package bson is an implementation of the [BSON specification](http://bsonspec.org) for Go.
-
-While the BSON package implements the BSON spec as faithfully as possible, there
-is some MongoDB specific behaviour (such as map keys `$in`, `$all`, etc) in the
-`bson` package. The priority is for backwards compatibility for the `mgo`
-driver, though fixes for obviously buggy behaviour is welcome (and features, etc
-behind feature flags).
vendor/github.com/globalsign/mgo/bson/bson.go (generated, vendored; 836 deletions)

@@ -1,836 +0,0 @@
|
||||||
// BSON library for Go
|
|
||||||
//
|
|
||||||
// Copyright (c) 2010-2012 - Gustavo Niemeyer <gustavo@niemeyer.net>
|
|
||||||
//
|
|
||||||
// All rights reserved.
|
|
||||||
//
|
|
||||||
// Redistribution and use in source and binary forms, with or without
|
|
||||||
// modification, are permitted provided that the following conditions are met:
|
|
||||||
//
|
|
||||||
// 1. Redistributions of source code must retain the above copyright notice, this
|
|
||||||
// list of conditions and the following disclaimer.
|
|
||||||
// 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
// this list of conditions and the following disclaimer in the documentation
|
|
||||||
// and/or other materials provided with the distribution.
|
|
||||||
//
|
|
||||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
|
||||||
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
|
||||||
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
|
||||||
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
|
||||||
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
||||||
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
// Package bson is an implementation of the BSON specification for Go:
|
|
||||||
//
|
|
||||||
// http://bsonspec.org
|
|
||||||
//
|
|
||||||
// It was created as part of the mgo MongoDB driver for Go, but is standalone
|
|
||||||
// and may be used on its own without the driver.
|
|
||||||
package bson
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"crypto/md5"
|
|
||||||
"crypto/rand"
|
|
||||||
"encoding/binary"
|
|
||||||
"encoding/hex"
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"math"
|
|
||||||
"os"
|
|
||||||
"reflect"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
"sync/atomic"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
//go:generate go run bson_corpus_spec_test_generator.go
|
|
||||||
|
|
||||||
// --------------------------------------------------------------------------
|
|
||||||
// The public API.
|
|
||||||
|
|
||||||
// Element types constants from BSON specification.
|
|
||||||
const (
|
|
||||||
ElementFloat64 byte = 0x01
|
|
||||||
ElementString byte = 0x02
|
|
||||||
ElementDocument byte = 0x03
|
|
||||||
ElementArray byte = 0x04
|
|
||||||
ElementBinary byte = 0x05
|
|
||||||
Element06 byte = 0x06
|
|
||||||
ElementObjectId byte = 0x07
|
|
||||||
ElementBool byte = 0x08
|
|
||||||
ElementDatetime byte = 0x09
|
|
||||||
ElementNil byte = 0x0A
|
|
||||||
ElementRegEx byte = 0x0B
|
|
||||||
ElementDBPointer byte = 0x0C
|
|
||||||
ElementJavaScriptWithoutScope byte = 0x0D
|
|
||||||
ElementSymbol byte = 0x0E
|
|
||||||
ElementJavaScriptWithScope byte = 0x0F
|
|
||||||
ElementInt32 byte = 0x10
|
|
||||||
ElementTimestamp byte = 0x11
|
|
||||||
ElementInt64 byte = 0x12
|
|
||||||
ElementDecimal128 byte = 0x13
|
|
||||||
ElementMinKey byte = 0xFF
|
|
||||||
ElementMaxKey byte = 0x7F
|
|
||||||
|
|
||||||
BinaryGeneric byte = 0x00
|
|
||||||
BinaryFunction byte = 0x01
|
|
||||||
BinaryBinaryOld byte = 0x02
|
|
||||||
BinaryUUIDOld byte = 0x03
|
|
||||||
BinaryUUID byte = 0x04
|
|
||||||
BinaryMD5 byte = 0x05
|
|
||||||
BinaryUserDefined byte = 0x80
|
|
||||||
)
|
|
||||||
|
|
||||||
// Getter interface: a value implementing the bson.Getter interface will have its GetBSON
|
|
||||||
// method called when the given value has to be marshalled, and the result
|
|
||||||
// of this method will be marshaled in place of the actual object.
|
|
||||||
//
|
|
||||||
// If GetBSON returns return a non-nil error, the marshalling procedure
|
|
||||||
// will stop and error out with the provided value.
|
|
||||||
type Getter interface {
|
|
||||||
GetBSON() (interface{}, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Setter interface: a value implementing the bson.Setter interface will receive the BSON
|
|
||||||
// value via the SetBSON method during unmarshaling, and the object
|
|
||||||
// itself will not be changed as usual.
|
|
||||||
//
|
|
||||||
// If setting the value works, the method should return nil or alternatively
|
|
||||||
// bson.ErrSetZero to set the respective field to its zero value (nil for
|
|
||||||
// pointer types). If SetBSON returns a value of type bson.TypeError, the
|
|
||||||
// BSON value will be omitted from a map or slice being decoded and the
|
|
||||||
// unmarshalling will continue. If it returns any other non-nil error, the
|
|
||||||
// unmarshalling procedure will stop and error out with the provided value.
|
|
||||||
//
|
|
||||||
// This interface is generally useful in pointer receivers, since the method
|
|
||||||
// will want to change the receiver. A type field that implements the Setter
|
|
||||||
// interface doesn't have to be a pointer, though.
|
|
||||||
//
|
|
||||||
// Unlike the usual behavior, unmarshalling onto a value that implements a
|
|
||||||
// Setter interface will NOT reset the value to its zero state. This allows
|
|
||||||
// the value to decide by itself how to be unmarshalled.
|
|
||||||
//
|
|
||||||
// For example:
|
|
||||||
//
|
|
||||||
// type MyString string
|
|
||||||
//
|
|
||||||
// func (s *MyString) SetBSON(raw bson.Raw) error {
|
|
||||||
// return raw.Unmarshal(s)
|
|
||||||
// }
|
|
||||||
//
|
|
||||||
type Setter interface {
|
|
||||||
SetBSON(raw Raw) error
|
|
||||||
}
|
|
||||||
|
|
||||||
// ErrSetZero may be returned from a SetBSON method to have the value set to
|
|
||||||
// its respective zero value. When used in pointer values, this will set the
|
|
||||||
// field to nil rather than to the pre-allocated value.
|
|
||||||
var ErrSetZero = errors.New("set to zero")
|
|
||||||
|
|
||||||
// M is a convenient alias for a map[string]interface{} map, useful for
|
|
||||||
// dealing with BSON in a native way. For instance:
|
|
||||||
//
|
|
||||||
// bson.M{"a": 1, "b": true}
|
|
||||||
//
|
|
||||||
// There's no special handling for this type in addition to what's done anyway
|
|
||||||
// for an equivalent map type. Elements in the map will be dumped in an
|
|
||||||
// undefined ordered. See also the bson.D type for an ordered alternative.
|
|
||||||
type M map[string]interface{}
|
|
||||||
|
|
||||||
// D represents a BSON document containing ordered elements. For example:
|
|
||||||
//
|
|
||||||
// bson.D{{"a", 1}, {"b", true}}
|
|
||||||
//
|
|
||||||
// In some situations, such as when creating indexes for MongoDB, the order in
|
|
||||||
// which the elements are defined is important. If the order is not important,
|
|
||||||
// using a map is generally more comfortable. See bson.M and bson.RawD.
|
|
||||||
type D []DocElem
|
|
||||||
|
|
||||||
// DocElem is an element of the bson.D document representation.
|
|
||||||
type DocElem struct {
|
|
||||||
Name string
|
|
||||||
Value interface{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Map returns a map out of the ordered element name/value pairs in d.
|
|
||||||
func (d D) Map() (m M) {
|
|
||||||
m = make(M, len(d))
|
|
||||||
for _, item := range d {
|
|
||||||
m[item.Name] = item.Value
|
|
||||||
}
|
|
||||||
return m
|
|
||||||
}
|
|
||||||
|
|
||||||
// The Raw type represents raw unprocessed BSON documents and elements.
|
|
||||||
// Kind is the kind of element as defined per the BSON specification, and
|
|
||||||
// Data is the raw unprocessed data for the respective element.
|
|
||||||
// Using this type it is possible to unmarshal or marshal values partially.
|
|
||||||
//
|
|
||||||
// Relevant documentation:
|
|
||||||
//
|
|
||||||
// http://bsonspec.org/#/specification
|
|
||||||
//
|
|
||||||
type Raw struct {
|
|
||||||
Kind byte
|
|
||||||
Data []byte
|
|
||||||
}
|
|
||||||
|
|
||||||
// RawD represents a BSON document containing raw unprocessed elements.
|
|
||||||
// This low-level representation may be useful when lazily processing
|
|
||||||
// documents of uncertain content, or when manipulating the raw content
|
|
||||||
// documents in general.
|
|
||||||
type RawD []RawDocElem
|
|
||||||
|
|
||||||
// RawDocElem elements of RawD type.
|
|
||||||
type RawDocElem struct {
|
|
||||||
Name string
|
|
||||||
Value Raw
|
|
||||||
}
|
|
||||||
|
|
||||||
// ObjectId is a unique ID identifying a BSON value. It must be exactly 12 bytes
|
|
||||||
// long. MongoDB objects by default have such a property set in their "_id"
|
|
||||||
// property.
|
|
||||||
//
|
|
||||||
// http://www.mongodb.org/display/DOCS/Object+Ids
|
|
||||||
type ObjectId string
|
|
||||||
|
|
||||||
// ObjectIdHex returns an ObjectId from the provided hex representation.
|
|
||||||
// Calling this function with an invalid hex representation will
|
|
||||||
// cause a runtime panic. See the IsObjectIdHex function.
|
|
||||||
func ObjectIdHex(s string) ObjectId {
|
|
||||||
d, err := hex.DecodeString(s)
|
|
||||||
if err != nil || len(d) != 12 {
|
|
||||||
panic(fmt.Sprintf("invalid input to ObjectIdHex: %q", s))
|
|
||||||
}
|
|
||||||
return ObjectId(d)
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsObjectIdHex returns whether s is a valid hex representation of
|
|
||||||
// an ObjectId. See the ObjectIdHex function.
|
|
||||||
func IsObjectIdHex(s string) bool {
|
|
||||||
if len(s) != 24 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
_, err := hex.DecodeString(s)
|
|
||||||
return err == nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// objectIdCounter is atomically incremented when generating a new ObjectId
|
|
||||||
// using NewObjectId() function. It's used as a counter part of an id.
|
|
||||||
var objectIdCounter = readRandomUint32()
|
|
||||||
|
|
||||||
// readRandomUint32 returns a random objectIdCounter.
|
|
||||||
func readRandomUint32() uint32 {
|
|
||||||
var b [4]byte
|
|
||||||
_, err := io.ReadFull(rand.Reader, b[:])
|
|
||||||
if err != nil {
|
|
||||||
panic(fmt.Errorf("cannot read random object id: %v", err))
|
|
||||||
}
|
|
||||||
return uint32((uint32(b[0]) << 0) | (uint32(b[1]) << 8) | (uint32(b[2]) << 16) | (uint32(b[3]) << 24))
|
|
||||||
}
|
|
||||||
|
|
||||||
// machineId stores machine id generated once and used in subsequent calls
|
|
||||||
// to NewObjectId function.
|
|
||||||
var machineId = readMachineId()
|
|
||||||
var processId = os.Getpid()
|
|
||||||
|
|
||||||
// readMachineId generates and returns a machine id.
|
|
||||||
// If this function fails to get the hostname it will cause a runtime error.
|
|
||||||
func readMachineId() []byte {
|
|
||||||
var sum [3]byte
|
|
||||||
id := sum[:]
|
|
||||||
hostname, err1 := os.Hostname()
|
|
||||||
if err1 != nil {
|
|
||||||
_, err2 := io.ReadFull(rand.Reader, id)
|
|
||||||
if err2 != nil {
|
|
||||||
panic(fmt.Errorf("cannot get hostname: %v; %v", err1, err2))
|
|
||||||
}
|
|
||||||
return id
|
|
||||||
}
|
|
||||||
hw := md5.New()
|
|
||||||
hw.Write([]byte(hostname))
|
|
||||||
copy(id, hw.Sum(nil))
|
|
||||||
return id
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewObjectId returns a new unique ObjectId.
|
|
||||||
func NewObjectId() ObjectId {
|
|
||||||
var b [12]byte
|
|
||||||
// Timestamp, 4 bytes, big endian
|
|
||||||
binary.BigEndian.PutUint32(b[:], uint32(time.Now().Unix()))
|
|
||||||
// Machine, first 3 bytes of md5(hostname)
|
|
||||||
b[4] = machineId[0]
|
|
||||||
b[5] = machineId[1]
|
|
||||||
b[6] = machineId[2]
|
|
||||||
// Pid, 2 bytes, specs don't specify endianness, but we use big endian.
|
|
||||||
b[7] = byte(processId >> 8)
|
|
||||||
b[8] = byte(processId)
|
|
||||||
// Increment, 3 bytes, big endian
|
|
||||||
i := atomic.AddUint32(&objectIdCounter, 1)
|
|
||||||
b[9] = byte(i >> 16)
|
|
||||||
b[10] = byte(i >> 8)
|
|
||||||
b[11] = byte(i)
|
|
||||||
return ObjectId(b[:])
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewObjectIdWithTime returns a dummy ObjectId with the timestamp part filled
|
|
||||||
// with the provided number of seconds from epoch UTC, and all other parts
|
|
||||||
// filled with zeroes. It's not safe to insert a document with an id generated
|
|
||||||
// by this method, it is useful only for queries to find documents with ids
|
|
||||||
// generated before or after the specified timestamp.
|
|
||||||
func NewObjectIdWithTime(t time.Time) ObjectId {
|
|
||||||
var b [12]byte
|
|
||||||
binary.BigEndian.PutUint32(b[:4], uint32(t.Unix()))
|
|
||||||
return ObjectId(string(b[:]))
|
|
||||||
}
|
|
||||||
|
|
||||||
// String returns a hex string representation of the id.
|
|
||||||
// Example: ObjectIdHex("4d88e15b60f486e428412dc9").
|
|
||||||
func (id ObjectId) String() string {
|
|
||||||
return fmt.Sprintf(`ObjectIdHex("%x")`, string(id))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Hex returns a hex representation of the ObjectId.
|
|
||||||
func (id ObjectId) Hex() string {
|
|
||||||
return hex.EncodeToString([]byte(id))
|
|
||||||
}
|
|
||||||
|
|
||||||
// MarshalJSON turns a bson.ObjectId into a json.Marshaller.
|
|
||||||
func (id ObjectId) MarshalJSON() ([]byte, error) {
|
|
||||||
return []byte(fmt.Sprintf(`"%x"`, string(id))), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
var nullBytes = []byte("null")
|
|
||||||
|
|
||||||
// UnmarshalJSON turns *bson.ObjectId into a json.Unmarshaller.
|
|
||||||
func (id *ObjectId) UnmarshalJSON(data []byte) error {
|
|
||||||
if len(data) > 0 && (data[0] == '{' || data[0] == 'O') {
|
|
||||||
var v struct {
|
|
||||||
Id json.RawMessage `json:"$oid"`
|
|
||||||
Func struct {
|
|
||||||
Id json.RawMessage
|
|
||||||
} `json:"$oidFunc"`
|
|
||||||
}
|
|
||||||
err := jdec(data, &v)
|
|
||||||
if err == nil {
|
|
||||||
if len(v.Id) > 0 {
|
|
||||||
data = []byte(v.Id)
|
|
||||||
} else {
|
|
||||||
data = []byte(v.Func.Id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(data) == 2 && data[0] == '"' && data[1] == '"' || bytes.Equal(data, nullBytes) {
|
|
||||||
*id = ""
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
if len(data) != 26 || data[0] != '"' || data[25] != '"' {
|
|
||||||
return fmt.Errorf("invalid ObjectId in JSON: %s", string(data))
|
|
||||||
}
|
|
||||||
var buf [12]byte
|
|
||||||
_, err := hex.Decode(buf[:], data[1:25])
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("invalid ObjectId in JSON: %s (%s)", string(data), err)
|
|
||||||
}
|
|
||||||
*id = ObjectId(string(buf[:]))
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// MarshalText turns bson.ObjectId into an encoding.TextMarshaler.
|
|
||||||
func (id ObjectId) MarshalText() ([]byte, error) {
|
|
||||||
return []byte(fmt.Sprintf("%x", string(id))), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// UnmarshalText turns *bson.ObjectId into an encoding.TextUnmarshaler.
|
|
||||||
func (id *ObjectId) UnmarshalText(data []byte) error {
|
|
||||||
if len(data) == 1 && data[0] == ' ' || len(data) == 0 {
|
|
||||||
*id = ""
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
if len(data) != 24 {
|
|
||||||
return fmt.Errorf("invalid ObjectId: %s", data)
|
|
||||||
}
|
|
||||||
var buf [12]byte
|
|
||||||
_, err := hex.Decode(buf[:], data[:])
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("invalid ObjectId: %s (%s)", data, err)
|
|
||||||
}
|
|
||||||
*id = ObjectId(string(buf[:]))
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Valid returns true if id is valid. A valid id must contain exactly 12 bytes.
|
|
||||||
func (id ObjectId) Valid() bool {
|
|
||||||
return len(id) == 12
|
|
||||||
}
|
|
||||||
|
|
||||||
// byteSlice returns byte slice of id from start to end.
|
|
||||||
// Calling this function with an invalid id will cause a runtime panic.
|
|
||||||
func (id ObjectId) byteSlice(start, end int) []byte {
|
|
||||||
if len(id) != 12 {
|
|
||||||
panic(fmt.Sprintf("invalid ObjectId: %q", string(id)))
|
|
||||||
}
|
|
||||||
return []byte(string(id)[start:end])
|
|
||||||
}
|
|
||||||
|
|
||||||
// Time returns the timestamp part of the id.
|
|
||||||
// It's a runtime error to call this method with an invalid id.
|
|
||||||
func (id ObjectId) Time() time.Time {
|
|
||||||
// First 4 bytes of ObjectId is 32-bit big-endian seconds from epoch.
|
|
||||||
secs := int64(binary.BigEndian.Uint32(id.byteSlice(0, 4)))
|
|
||||||
return time.Unix(secs, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Machine returns the 3-byte machine id part of the id.
|
|
||||||
// It's a runtime error to call this method with an invalid id.
|
|
||||||
func (id ObjectId) Machine() []byte {
|
|
||||||
return id.byteSlice(4, 7)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pid returns the process id part of the id.
|
|
||||||
// It's a runtime error to call this method with an invalid id.
|
|
||||||
func (id ObjectId) Pid() uint16 {
|
|
||||||
return binary.BigEndian.Uint16(id.byteSlice(7, 9))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Counter returns the incrementing value part of the id.
|
|
||||||
// It's a runtime error to call this method with an invalid id.
|
|
||||||
func (id ObjectId) Counter() int32 {
|
|
||||||
b := id.byteSlice(9, 12)
|
|
||||||
// Counter is stored as big-endian 3-byte value
|
|
||||||
return int32(uint32(b[0])<<16 | uint32(b[1])<<8 | uint32(b[2]))
|
|
||||||
}
|
|
||||||
|
|
||||||
// The Symbol type is similar to a string and is used in languages with a
|
|
||||||
// distinct symbol type.
|
|
||||||
type Symbol string
|
|
||||||
|
|
||||||
// Now returns the current time with millisecond precision. MongoDB stores
|
|
||||||
// timestamps with the same precision, so a Time returned from this method
|
|
||||||
// will not change after a roundtrip to the database. That's the only reason
|
|
||||||
// why this function exists. Using the time.Now function also works fine
|
|
||||||
// otherwise.
|
|
||||||
func Now() time.Time {
|
|
||||||
return time.Unix(0, time.Now().UnixNano()/1e6*1e6)
|
|
||||||
}
|
|
||||||
|
|
||||||
// MongoTimestamp is a special internal type used by MongoDB that for some
|
|
||||||
// strange reason has its own datatype defined in BSON.
|
|
||||||
type MongoTimestamp int64
|
|
||||||
|
|
||||||
// Time returns the time part of ts which is stored with second precision.
|
|
||||||
func (ts MongoTimestamp) Time() time.Time {
|
|
||||||
return time.Unix(int64(uint64(ts)>>32), 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Counter returns the counter part of ts.
|
|
||||||
func (ts MongoTimestamp) Counter() uint32 {
|
|
||||||
return uint32(ts)
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewMongoTimestamp creates a timestamp using the given
|
|
||||||
// date `t` (with second precision) and counter `c` (unique for `t`).
|
|
||||||
//
|
|
||||||
// Returns an error if time `t` is not between 1970-01-01T00:00:00Z
|
|
||||||
// and 2106-02-07T06:28:15Z (inclusive).
|
|
||||||
//
|
|
||||||
// Note that two MongoTimestamps should never have the same (time, counter) combination:
|
|
||||||
// the caller must ensure the counter `c` is increased if creating multiple MongoTimestamp
|
|
||||||
// values for the same time `t` (ignoring fractions of seconds).
|
|
||||||
func NewMongoTimestamp(t time.Time, c uint32) (MongoTimestamp, error) {
|
|
||||||
u := t.Unix()
|
|
||||||
if u < 0 || u > math.MaxUint32 {
|
|
||||||
return -1, errors.New("invalid value for time")
|
|
||||||
}
|
|
||||||
|
|
||||||
i := int64(u<<32 | int64(c))
|
|
||||||
|
|
||||||
return MongoTimestamp(i), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type orderKey int64
|
|
||||||
|
|
||||||
// MaxKey is a special value that compares higher than all other possible BSON
|
|
||||||
// values in a MongoDB database.
|
|
||||||
var MaxKey = orderKey(1<<63 - 1)
|
|
||||||
|
|
||||||
// MinKey is a special value that compares lower than all other possible BSON
|
|
||||||
// values in a MongoDB database.
|
|
||||||
var MinKey = orderKey(-1 << 63)
|
|
||||||
|
|
||||||
type undefined struct{}
|
|
||||||
|
|
||||||
// Undefined represents the undefined BSON value.
|
|
||||||
var Undefined undefined
|
|
||||||
|
|
||||||
// Binary is a representation for non-standard binary values. Any kind should
|
|
||||||
// work, but the following are known as of this writing:
|
|
||||||
//
|
|
||||||
// 0x00 - Generic. This is decoded as []byte(data), not Binary{0x00, data}.
|
|
||||||
// 0x01 - Function (!?)
|
|
||||||
// 0x02 - Obsolete generic.
|
|
||||||
// 0x03 - UUID
|
|
||||||
// 0x05 - MD5
|
|
||||||
// 0x80 - User defined.
|
|
||||||
//
|
|
||||||
type Binary struct {
|
|
||||||
Kind byte
|
|
||||||
Data []byte
|
|
||||||
}
|
|
||||||
|
|
||||||
// RegEx represents a regular expression. The Options field may contain
|
|
||||||
// individual characters defining the way in which the pattern should be
|
|
||||||
// applied, and must be sorted. Valid options as of this writing are 'i' for
|
|
||||||
// case insensitive matching, 'm' for multi-line matching, 'x' for verbose
|
|
||||||
// mode, 'l' to make \w, \W, and similar be locale-dependent, 's' for dot-all
|
|
||||||
// mode (a '.' matches everything), and 'u' to make \w, \W, and similar match
|
|
||||||
// unicode. The value of the Options parameter is not verified before being
|
|
||||||
// marshaled into the BSON format.
|
|
||||||
type RegEx struct {
|
|
||||||
Pattern string
|
|
||||||
Options string
|
|
||||||
}

// JavaScript is a type that holds JavaScript code. If Scope is non-nil, it
// will be marshaled as a mapping from identifiers to values that may be
// used when evaluating the provided Code.
type JavaScript struct {
    Code  string
    Scope interface{}
}

// DBPointer refers to a document id in a namespace.
//
// This type is deprecated in the BSON specification and should not be used
// except for backwards compatibility with ancient applications.
type DBPointer struct {
    Namespace string
    Id        ObjectId
}

const initialBufferSize = 64

func handleErr(err *error) {
    if r := recover(); r != nil {
        if _, ok := r.(runtime.Error); ok {
            panic(r)
        } else if _, ok := r.(externalPanic); ok {
            panic(r)
        } else if s, ok := r.(string); ok {
            *err = errors.New(s)
        } else if e, ok := r.(error); ok {
            *err = e
        } else {
            panic(r)
        }
    }
}

// Marshal serializes the in value, which may be a map or a struct value.
// In the case of struct values, only exported fields will be serialized,
// and the order of serialized fields will match that of the struct itself.
// The lowercased field name is used as the key for each exported field,
// but this behavior may be changed using the respective field tag.
// The tag may also contain flags to tweak the marshalling behavior for
// the field. The tag formats accepted are:
//
//     "[<key>][,<flag1>[,<flag2>]]"
//
//     `(...) bson:"[<key>][,<flag1>[,<flag2>]]" (...)`
//
// The following flags are currently supported:
//
//     omitempty  Only include the field if it's not set to the zero
//                value for the type or to empty slices or maps.
//
//     minsize    Marshal an int64 value as an int32, if that's feasible
//                while preserving the numeric value.
//
//     inline     Inline the field, which must be a struct or a map,
//                causing all of its fields or keys to be processed as if
//                they were part of the outer struct. For maps, keys must
//                not conflict with the bson keys of other struct fields.
//
// Some examples:
//
//     type T struct {
//         A bool
//         B int    "myb"
//         C string "myc,omitempty"
//         D string `bson:",omitempty" json:"jsonkey"`
//         E int64  ",minsize"
//         F int64  "myf,omitempty,minsize"
//     }
//
func Marshal(in interface{}) (out []byte, err error) {
    return MarshalBuffer(in, make([]byte, 0, initialBufferSize))
}

// MarshalBuffer behaves the same way as Marshal, except that instead of
// allocating a new byte slice it tries to use the received byte slice and
// only allocates more memory if necessary to fit the marshaled value.
func MarshalBuffer(in interface{}, buf []byte) (out []byte, err error) {
    defer handleErr(&err)
    e := &encoder{buf}
    e.addDoc(reflect.ValueOf(in))
    return e.out, nil
}
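
// exampleMarshalTags is an illustrative sketch (not part of the original
// file) showing how the struct tag flags documented above affect Marshal.
// The person type and its field names are hypothetical.
func exampleMarshalTags() ([]byte, error) {
    type person struct {
        Name   string `bson:"name"`
        Nick   string `bson:"nick,omitempty"` // dropped while empty
        Visits int64  `bson:",minsize"`       // encoded as int32 when it fits
    }
    // The resulting document has the keys "name" and "visits" only.
    return Marshal(person{Name: "Ada", Visits: 3})
}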

// Unmarshal deserializes data from in into the out value. The out value
// must be a map, a pointer to a struct, or a pointer to a bson.D value.
// In the case of struct values, only exported fields will be deserialized.
// The lowercased field name is used as the key for each exported field,
// but this behavior may be changed using the respective field tag.
// The tag may also contain flags to tweak the marshalling behavior for
// the field. The tag formats accepted are:
//
//     "[<key>][,<flag1>[,<flag2>]]"
//
//     `(...) bson:"[<key>][,<flag1>[,<flag2>]]" (...)`
//
// The following flags are currently supported during unmarshal (see the
// Marshal method for other flags):
//
//     inline     Inline the field, which must be a struct or a map.
//                Inlined structs are handled as if its fields were part
//                of the outer struct. An inlined map causes keys that do
//                not match any other struct field to be inserted in the
//                map rather than being discarded as usual.
//
// The target field or element types of out may not necessarily match
// the BSON values of the provided data. The following conversions are
// made automatically:
//
// - Numeric types are converted if at least the integer part of the
//   value would be preserved correctly
// - Bools are converted to numeric types as 1 or 0
// - Numeric types are converted to bools as true if not 0 or false otherwise
// - Binary and string BSON data is converted to a string, array or byte slice
//
// If the value would not fit the type and cannot be converted, it's
// silently skipped.
//
// Pointer values are initialized when necessary.
func Unmarshal(in []byte, out interface{}) (err error) {
    if raw, ok := out.(*Raw); ok {
        raw.Kind = 3
        raw.Data = in
        return nil
    }
    defer handleErr(&err)
    v := reflect.ValueOf(out)
    switch v.Kind() {
    case reflect.Ptr:
        fallthrough
    case reflect.Map:
        d := newDecoder(in)
        d.readDocTo(v)
        if d.i < len(d.in) {
            return errors.New("document is corrupted")
        }
    case reflect.Struct:
        return errors.New("unmarshal can't deal with struct values. Use a pointer")
    default:
        return errors.New("unmarshal needs a map or a pointer to a struct")
    }
    return nil
}
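
// exampleUnmarshal is an illustrative sketch (not part of the original file)
// of the Unmarshal behaviour described above: out must be a map or a pointer
// to a struct, and numeric values are converted when they still fit.
func exampleUnmarshal(data []byte) error {
    asMap := M{} // M is the package's generic map document type
    if err := Unmarshal(data, asMap); err != nil {
        return err
    }

    var asStruct struct {
        Title string `bson:"title"`
        Count int    `bson:"count"` // an int64 in the data is converted if it fits
    }
    return Unmarshal(data, &asStruct)
}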

// Unmarshal deserializes raw into the out value. If the out value type
// is not compatible with raw, a *bson.TypeError is returned.
//
// See the Unmarshal function documentation for more details on the
// unmarshalling process.
func (raw Raw) Unmarshal(out interface{}) (err error) {
    defer handleErr(&err)
    v := reflect.ValueOf(out)
    switch v.Kind() {
    case reflect.Ptr:
        v = v.Elem()
        fallthrough
    case reflect.Map:
        d := newDecoder(raw.Data)
        good := d.readElemTo(v, raw.Kind)
        if !good {
            return &TypeError{v.Type(), raw.Kind}
        }
    case reflect.Struct:
        return errors.New("raw Unmarshal can't deal with struct values. Use a pointer")
    default:
        return errors.New("raw Unmarshal needs a map or a valid pointer")
    }
    return nil
}

// TypeError stores details for a type error occurring
// during unmarshaling.
type TypeError struct {
    Type reflect.Type
    Kind byte
}

func (e *TypeError) Error() string {
    return fmt.Sprintf("BSON kind 0x%02x isn't compatible with type %s", e.Kind, e.Type.String())
}
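
// exampleRawUnmarshal is an illustrative sketch (not part of the original
// file): unmarshalling into *Raw keeps the document bytes around, and
// Raw.Unmarshal later reports a *TypeError when the target type does not
// match the stored BSON kind.
func exampleRawUnmarshal(data []byte) error {
    var raw Raw
    if err := Unmarshal(data, &raw); err != nil { // stores Kind 0x03 plus the raw bytes
        return err
    }

    var doc struct {
        Name string `bson:"name"`
    }
    if err := raw.Unmarshal(&doc); err != nil {
        if tErr, ok := err.(*TypeError); ok {
            return fmt.Errorf("unexpected BSON kind 0x%02x", tErr.Kind)
        }
        return err
    }
    return nil
}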

// --------------------------------------------------------------------------
// Maintain a mapping of keys to structure field indexes

type structInfo struct {
    FieldsMap  map[string]fieldInfo
    FieldsList []fieldInfo
    InlineMap  int
    Zero       reflect.Value
}

type fieldInfo struct {
    Key       string
    Num       int
    OmitEmpty bool
    MinSize   bool
    Inline    []int
}

var structMap = make(map[reflect.Type]*structInfo)
var structMapMutex sync.RWMutex

type externalPanic string

func (e externalPanic) String() string {
    return string(e)
}

func getStructInfo(st reflect.Type) (*structInfo, error) {
    structMapMutex.RLock()
    sinfo, found := structMap[st]
    structMapMutex.RUnlock()
    if found {
        return sinfo, nil
    }
    n := st.NumField()
    fieldsMap := make(map[string]fieldInfo)
    fieldsList := make([]fieldInfo, 0, n)
    inlineMap := -1
    for i := 0; i != n; i++ {
        field := st.Field(i)
        if field.PkgPath != "" && !field.Anonymous {
            continue // Private field
        }

        info := fieldInfo{Num: i}

        tag := field.Tag.Get("bson")

        // Fall-back to JSON struct tag, if feature flag is set.
        if tag == "" && useJSONTagFallback {
            tag = field.Tag.Get("json")
        }

        // If there's no bson/json tag available.
        if tag == "" {
            // If there's no tag, and also no tag: value splits (i.e. no colon)
            // then assume the entire tag is the value
            if strings.Index(string(field.Tag), ":") < 0 {
                tag = string(field.Tag)
            }
        }

        if tag == "-" {
            continue
        }

        inline := false
        fields := strings.Split(tag, ",")
        if len(fields) > 1 {
            for _, flag := range fields[1:] {
                switch flag {
                case "omitempty":
                    info.OmitEmpty = true
                case "minsize":
                    info.MinSize = true
                case "inline":
                    inline = true
                default:
                    msg := fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st)
                    panic(externalPanic(msg))
                }
            }
            tag = fields[0]
        }

        if inline {
            switch field.Type.Kind() {
            case reflect.Map:
                if inlineMap >= 0 {
                    return nil, errors.New("Multiple ,inline maps in struct " + st.String())
                }
                if field.Type.Key() != reflect.TypeOf("") {
                    return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
                }
                inlineMap = info.Num
            case reflect.Ptr:
                // allow only pointer to struct
                if kind := field.Type.Elem().Kind(); kind != reflect.Struct {
                    return nil, errors.New("Option ,inline allows a pointer only to a struct, was given pointer to " + kind.String())
                }

                field.Type = field.Type.Elem()
                fallthrough
            case reflect.Struct:
                sinfo, err := getStructInfo(field.Type)
                if err != nil {
                    return nil, err
                }
                for _, finfo := range sinfo.FieldsList {
                    if _, found := fieldsMap[finfo.Key]; found {
                        msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
                        return nil, errors.New(msg)
                    }
                    if finfo.Inline == nil {
                        finfo.Inline = []int{i, finfo.Num}
                    } else {
                        finfo.Inline = append([]int{i}, finfo.Inline...)
                    }
                    fieldsMap[finfo.Key] = finfo
                    fieldsList = append(fieldsList, finfo)
                }
            default:
                panic("Option ,inline needs a struct value or a pointer to a struct or map field")
            }
            continue
        }

        if tag != "" {
            info.Key = tag
        } else {
            info.Key = strings.ToLower(field.Name)
        }

        if _, found = fieldsMap[info.Key]; found {
            msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
            return nil, errors.New(msg)
        }

        fieldsList = append(fieldsList, info)
        fieldsMap[info.Key] = info
    }
    sinfo = &structInfo{
        fieldsMap,
        fieldsList,
        inlineMap,
        reflect.New(st).Elem(),
    }
    structMapMutex.Lock()
    structMap[st] = sinfo
    structMapMutex.Unlock()
    return sinfo, nil
}
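
// exampleInlineTag is an illustrative sketch (not part of the original file)
// of the ",inline" handling implemented by getStructInfo above: fields of an
// inlined struct are flattened into the outer document, and an inlined map
// with string keys collects any otherwise unmatched keys.
func exampleInlineTag() ([]byte, error) {
    type audit struct {
        Created int64 `bson:"created"`
        Updated int64 `bson:"updated"`
    }
    type task struct {
        Title string                 `bson:"title"`
        Audit audit                  `bson:",inline"` // created/updated become top-level keys
        Extra map[string]interface{} `bson:",inline"` // catches remaining keys on unmarshal
    }
    return Marshal(task{Title: "write docs", Audit: audit{Created: 1}})
}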
294
vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go
generated
vendored
File diff suppressed because it is too large
Load diff
29
vendor/github.com/globalsign/mgo/bson/compatibility.go
generated
vendored
@ -1,29 +0,0 @@
package bson

// Current state of the JSON tag fallback option.
var useJSONTagFallback = false
var useRespectNilValues = false

// SetJSONTagFallback enables or disables the JSON-tag fallback for structure tagging. When this is enabled, structures
// without BSON tags on a field will fall back to using the JSON tag (if present).
func SetJSONTagFallback(state bool) {
    useJSONTagFallback = state
}

// JSONTagFallbackState returns the current status of the JSON tag fallback compatibility option. See SetJSONTagFallback
// for more information.
func JSONTagFallbackState() bool {
    return useJSONTagFallback
}

// SetRespectNilValues enables or disables serializing nil slices or maps to `null` values.
// In other words it enables `encoding/json` compatible behaviour.
func SetRespectNilValues(state bool) {
    useRespectNilValues = state
}

// RespectNilValuesState returns the current status of the JSON nil slices and maps fallback compatibility option.
// See SetRespectNilValues for more information.
func RespectNilValuesState() bool {
    return useRespectNilValues
}
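
// exampleCompatibilityFlags is an illustrative sketch (not part of the
// original file) of the two package-level switches defined above.
func exampleCompatibilityFlags() {
    // Reuse an existing `json:"..."` tag when a field carries no bson tag.
    SetJSONTagFallback(true)

    // Encode nil slices and maps as null, matching encoding/json behaviour.
    SetRespectNilValues(true)

    _ = JSONTagFallbackState() && RespectNilValuesState() // both options are now active
}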
312
vendor/github.com/globalsign/mgo/bson/decimal.go
generated
vendored
File diff suppressed because it is too large
Load diff
1055
vendor/github.com/globalsign/mgo/bson/decode.go
generated
vendored
File diff suppressed because it is too large
Load diff
645
vendor/github.com/globalsign/mgo/bson/encode.go
generated
vendored
File diff suppressed because it is too large
Load diff
384
vendor/github.com/globalsign/mgo/bson/json.go
generated
vendored
File diff suppressed because it is too large
Load diff
||||||
err := jdec(data, &v)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return MongoTimestamp(uint64(v.Func.T)<<32 | uint64(uint32(v.Func.I))), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jencTimestamp(v interface{}) ([]byte, error) {
|
|
||||||
ts := uint64(v.(MongoTimestamp))
|
|
||||||
return fbytes(`{"$timestamp":{"t":%d,"i":%d}}`, ts>>32, uint32(ts)), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jdecRegEx(data []byte) (interface{}, error) {
|
|
||||||
var v struct {
|
|
||||||
Regex string `json:"$regex"`
|
|
||||||
Options string `json:"$options"`
|
|
||||||
}
|
|
||||||
err := jdec(data, &v)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return RegEx{v.Regex, v.Options}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jencRegEx(v interface{}) ([]byte, error) {
|
|
||||||
re := v.(RegEx)
|
|
||||||
type regex struct {
|
|
||||||
Regex string `json:"$regex"`
|
|
||||||
Options string `json:"$options"`
|
|
||||||
}
|
|
||||||
return json.Marshal(regex{re.Pattern, re.Options})
|
|
||||||
}
|
|
||||||
|
|
||||||
func jdecObjectId(data []byte) (interface{}, error) {
|
|
||||||
var v struct {
|
|
||||||
Id string `json:"$oid"`
|
|
||||||
Func struct {
|
|
||||||
Id string
|
|
||||||
} `json:"$oidFunc"`
|
|
||||||
}
|
|
||||||
err := jdec(data, &v)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if v.Id == "" {
|
|
||||||
v.Id = v.Func.Id
|
|
||||||
}
|
|
||||||
return ObjectIdHex(v.Id), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jencObjectId(v interface{}) ([]byte, error) {
|
|
||||||
return fbytes(`{"$oid":"%s"}`, v.(ObjectId).Hex()), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jdecDBRef(data []byte) (interface{}, error) {
|
|
||||||
// TODO Support unmarshaling $ref and $id into the input value.
|
|
||||||
var v struct {
|
|
||||||
Obj map[string]interface{} `json:"$dbrefFunc"`
|
|
||||||
}
|
|
||||||
// TODO Fix this. Must not be required.
|
|
||||||
v.Obj = make(map[string]interface{})
|
|
||||||
err := jdec(data, &v)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return v.Obj, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jdecNumberLong(data []byte) (interface{}, error) {
|
|
||||||
var v struct {
|
|
||||||
N int64 `json:"$numberLong,string"`
|
|
||||||
Func struct {
|
|
||||||
N int64 `json:",string"`
|
|
||||||
} `json:"$numberLongFunc"`
|
|
||||||
}
|
|
||||||
var vn struct {
|
|
||||||
N int64 `json:"$numberLong"`
|
|
||||||
Func struct {
|
|
||||||
N int64
|
|
||||||
} `json:"$numberLongFunc"`
|
|
||||||
}
|
|
||||||
err := jdec(data, &v)
|
|
||||||
if err != nil {
|
|
||||||
err = jdec(data, &vn)
|
|
||||||
v.N = vn.N
|
|
||||||
v.Func.N = vn.Func.N
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if v.N != 0 {
|
|
||||||
return v.N, nil
|
|
||||||
}
|
|
||||||
return v.Func.N, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jencNumberLong(v interface{}) ([]byte, error) {
|
|
||||||
n := v.(int64)
|
|
||||||
f := `{"$numberLong":"%d"}`
|
|
||||||
if n <= 1<<53 {
|
|
||||||
f = `{"$numberLong":%d}`
|
|
||||||
}
|
|
||||||
return fbytes(f, n), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jencInt(v interface{}) ([]byte, error) {
|
|
||||||
n := v.(int)
|
|
||||||
f := `{"$numberLong":"%d"}`
|
|
||||||
if int64(n) <= 1<<53 {
|
|
||||||
f = `%d`
|
|
||||||
}
|
|
||||||
return fbytes(f, n), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jdecMinKey(data []byte) (interface{}, error) {
|
|
||||||
var v struct {
|
|
||||||
N int64 `json:"$minKey"`
|
|
||||||
}
|
|
||||||
err := jdec(data, &v)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if v.N != 1 {
|
|
||||||
return nil, fmt.Errorf("invalid $minKey object: %s", data)
|
|
||||||
}
|
|
||||||
return MinKey, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jdecMaxKey(data []byte) (interface{}, error) {
|
|
||||||
var v struct {
|
|
||||||
N int64 `json:"$maxKey"`
|
|
||||||
}
|
|
||||||
err := jdec(data, &v)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if v.N != 1 {
|
|
||||||
return nil, fmt.Errorf("invalid $maxKey object: %s", data)
|
|
||||||
}
|
|
||||||
return MaxKey, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jencMinMaxKey(v interface{}) ([]byte, error) {
|
|
||||||
switch v.(orderKey) {
|
|
||||||
case MinKey:
|
|
||||||
return []byte(`{"$minKey":1}`), nil
|
|
||||||
case MaxKey:
|
|
||||||
return []byte(`{"$maxKey":1}`), nil
|
|
||||||
}
|
|
||||||
panic(fmt.Sprintf("invalid $minKey/$maxKey value: %d", v))
|
|
||||||
}
|
|
||||||
|
|
||||||
func jdecUndefined(data []byte) (interface{}, error) {
|
|
||||||
var v struct {
|
|
||||||
B bool `json:"$undefined"`
|
|
||||||
}
|
|
||||||
err := jdec(data, &v)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if !v.B {
|
|
||||||
return nil, fmt.Errorf("invalid $undefined object: %s", data)
|
|
||||||
}
|
|
||||||
return Undefined, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func jencUndefined(v interface{}) ([]byte, error) {
|
|
||||||
return []byte(`{"$undefined":true}`), nil
|
|
||||||
}
|
90 vendor/github.com/globalsign/mgo/bson/stream.go (generated, vendored)
@@ -1,90 +0,0 @@
package bson
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/binary"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
// MinDocumentSize is the size of the smallest possible valid BSON document:
|
|
||||||
// an int32 size header + 0x00 (end of document).
|
|
||||||
MinDocumentSize = 5
|
|
||||||
|
|
||||||
// MaxDocumentSize is the largest possible size for a BSON document allowed by MongoDB,
|
|
||||||
// that is, 16 MiB (see https://docs.mongodb.com/manual/reference/limits/).
|
|
||||||
MaxDocumentSize = 16777216
|
|
||||||
)
|
|
||||||
|
|
||||||
// ErrInvalidDocumentSize is an error returned when a BSON document's header
|
|
||||||
// contains a size smaller than MinDocumentSize or greater than MaxDocumentSize.
|
|
||||||
type ErrInvalidDocumentSize struct {
|
|
||||||
DocumentSize int32
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e ErrInvalidDocumentSize) Error() string {
|
|
||||||
return fmt.Sprintf("invalid document size %d", e.DocumentSize)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A Decoder reads and decodes BSON values from an input stream.
|
|
||||||
type Decoder struct {
|
|
||||||
source io.Reader
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewDecoder returns a new Decoder that reads from source.
|
|
||||||
// It does not add any extra buffering, and may not read data from source beyond the BSON values requested.
|
|
||||||
func NewDecoder(source io.Reader) *Decoder {
|
|
||||||
return &Decoder{source: source}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decode reads the next BSON-encoded value from its input and stores it in the value pointed to by v.
|
|
||||||
// See the documentation for Unmarshal for details about the conversion of BSON into a Go value.
|
|
||||||
func (dec *Decoder) Decode(v interface{}) (err error) {
|
|
||||||
// BSON documents start with their size as a *signed* int32.
|
|
||||||
var docSize int32
|
|
||||||
if err = binary.Read(dec.source, binary.LittleEndian, &docSize); err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if docSize < MinDocumentSize || docSize > MaxDocumentSize {
|
|
||||||
return ErrInvalidDocumentSize{DocumentSize: docSize}
|
|
||||||
}
|
|
||||||
|
|
||||||
docBuffer := bytes.NewBuffer(make([]byte, 0, docSize))
|
|
||||||
if err = binary.Write(docBuffer, binary.LittleEndian, docSize); err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// docSize is the *full* document's size (including the 4-byte size header,
|
|
||||||
// which has already been read).
|
|
||||||
if _, err = io.CopyN(docBuffer, dec.source, int64(docSize-4)); err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Let Unmarshal handle the rest.
|
|
||||||
defer handleErr(&err)
|
|
||||||
return Unmarshal(docBuffer.Bytes(), v)
|
|
||||||
}
|
|
||||||
|
|
||||||
// An Encoder encodes and writes BSON values to an output stream.
|
|
||||||
type Encoder struct {
|
|
||||||
target io.Writer
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewEncoder returns a new Encoder that writes to target.
|
|
||||||
func NewEncoder(target io.Writer) *Encoder {
|
|
||||||
return &Encoder{target: target}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Encode encodes v to BSON, and if successful writes it to the Encoder's output stream.
|
|
||||||
// See the documentation for Marshal for details about the conversion of Go values to BSON.
|
|
||||||
func (enc *Encoder) Encode(v interface{}) error {
|
|
||||||
data, err := Marshal(v)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = enc.target.Write(data)
|
|
||||||
return err
|
|
||||||
}
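stream.go adds a streaming layer on top of Marshal/Unmarshal: an Encoder writes length-prefixed BSON documents to an io.Writer and a Decoder reads them back, rejecting size headers outside [MinDocumentSize, MaxDocumentSize]. A short usage sketch of the two types defined above:

```go
package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/globalsign/mgo/bson"
)

type task struct {
	Title string `bson:"title"`
}

func main() {
	var buf bytes.Buffer

	// Write two BSON documents back to back onto one stream.
	enc := bson.NewEncoder(&buf)
	for _, t := range []task{{Title: "first"}, {Title: "second"}} {
		if err := enc.Encode(t); err != nil {
			panic(err)
		}
	}

	// Read them back one at a time. Once the stream is exhausted the size
	// read inside Decode fails with io.EOF, which ends the loop.
	dec := bson.NewDecoder(&buf)
	for {
		var t task
		err := dec.Decode(&t)
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		fmt.Println(t.Title)
	}
}
```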
27 vendor/github.com/globalsign/mgo/internal/json/LICENSE (generated, vendored)
@@ -1,27 +0,0 @@
Copyright (c) 2012 The Go Authors. All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are
|
|
||||||
met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright
|
|
||||||
notice, this list of conditions and the following disclaimer.
|
|
||||||
* Redistributions in binary form must reproduce the above
|
|
||||||
copyright notice, this list of conditions and the following disclaimer
|
|
||||||
in the documentation and/or other materials provided with the
|
|
||||||
distribution.
|
|
||||||
* Neither the name of Google Inc. nor the names of its
|
|
||||||
contributors may be used to endorse or promote products derived from
|
|
||||||
this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
1685 vendor/github.com/globalsign/mgo/internal/json/decode.go (generated, vendored)
File diff suppressed because it is too large
1260 vendor/github.com/globalsign/mgo/internal/json/encode.go (generated, vendored)
File diff suppressed because it is too large
95 vendor/github.com/globalsign/mgo/internal/json/extension.go (generated, vendored)
@@ -1,95 +0,0 @@
package json
|
|
||||||
|
|
||||||
import (
|
|
||||||
"reflect"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Extension holds a set of additional rules to be used when unmarshaling
|
|
||||||
// strict JSON or JSON-like content.
|
|
||||||
type Extension struct {
|
|
||||||
funcs map[string]funcExt
|
|
||||||
consts map[string]interface{}
|
|
||||||
keyed map[string]func([]byte) (interface{}, error)
|
|
||||||
encode map[reflect.Type]func(v interface{}) ([]byte, error)
|
|
||||||
|
|
||||||
unquotedKeys bool
|
|
||||||
trailingCommas bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type funcExt struct {
|
|
||||||
key string
|
|
||||||
args []string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extend changes the decoder behavior to consider the provided extension.
|
|
||||||
func (dec *Decoder) Extend(ext *Extension) { dec.d.ext = *ext }
|
|
||||||
|
|
||||||
// Extend changes the encoder behavior to consider the provided extension.
|
|
||||||
func (enc *Encoder) Extend(ext *Extension) { enc.ext = *ext }
|
|
||||||
|
|
||||||
// Extend includes in e the extensions defined in ext.
|
|
||||||
func (e *Extension) Extend(ext *Extension) {
|
|
||||||
for name, fext := range ext.funcs {
|
|
||||||
e.DecodeFunc(name, fext.key, fext.args...)
|
|
||||||
}
|
|
||||||
for name, value := range ext.consts {
|
|
||||||
e.DecodeConst(name, value)
|
|
||||||
}
|
|
||||||
for key, decode := range ext.keyed {
|
|
||||||
e.DecodeKeyed(key, decode)
|
|
||||||
}
|
|
||||||
for typ, encode := range ext.encode {
|
|
||||||
if e.encode == nil {
|
|
||||||
e.encode = make(map[reflect.Type]func(v interface{}) ([]byte, error))
|
|
||||||
}
|
|
||||||
e.encode[typ] = encode
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DecodeFunc defines a function call that may be observed inside JSON content.
|
|
||||||
// A function with the provided name will be unmarshaled as the document
|
|
||||||
// {key: {args[0]: ..., args[N]: ...}}.
|
|
||||||
func (e *Extension) DecodeFunc(name string, key string, args ...string) {
|
|
||||||
if e.funcs == nil {
|
|
||||||
e.funcs = make(map[string]funcExt)
|
|
||||||
}
|
|
||||||
e.funcs[name] = funcExt{key, args}
|
|
||||||
}
|
|
||||||
|
|
||||||
// DecodeConst defines a constant name that may be observed inside JSON content
|
|
||||||
// and will be decoded with the provided value.
|
|
||||||
func (e *Extension) DecodeConst(name string, value interface{}) {
|
|
||||||
if e.consts == nil {
|
|
||||||
e.consts = make(map[string]interface{})
|
|
||||||
}
|
|
||||||
e.consts[name] = value
|
|
||||||
}
|
|
||||||
|
|
||||||
// DecodeKeyed defines a key that when observed as the first element inside a
|
|
||||||
// JSON document triggers the decoding of that document via the provided
|
|
||||||
// decode function.
|
|
||||||
func (e *Extension) DecodeKeyed(key string, decode func(data []byte) (interface{}, error)) {
|
|
||||||
if e.keyed == nil {
|
|
||||||
e.keyed = make(map[string]func([]byte) (interface{}, error))
|
|
||||||
}
|
|
||||||
e.keyed[key] = decode
|
|
||||||
}
|
|
||||||
|
|
||||||
// DecodeUnquotedKeys defines whether to accept map keys that are unquoted strings.
|
|
||||||
func (e *Extension) DecodeUnquotedKeys(accept bool) {
|
|
||||||
e.unquotedKeys = accept
|
|
||||||
}
|
|
||||||
|
|
||||||
// DecodeTrailingCommas defines whether to accept trailing commas in maps and arrays.
|
|
||||||
func (e *Extension) DecodeTrailingCommas(accept bool) {
|
|
||||||
e.trailingCommas = accept
|
|
||||||
}
|
|
||||||
|
|
||||||
// EncodeType registers a function to encode values with the same type of the
|
|
||||||
// provided sample.
|
|
||||||
func (e *Extension) EncodeType(sample interface{}, encode func(v interface{}) ([]byte, error)) {
|
|
||||||
if e.encode == nil {
|
|
||||||
e.encode = make(map[reflect.Type]func(v interface{}) ([]byte, error))
|
|
||||||
}
|
|
||||||
e.encode[reflect.TypeOf(sample)] = encode
|
|
||||||
}
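Extension is the hook the bson package uses (see the jsonExt/funcExt setup in bson/json.go earlier in this diff) to teach this forked JSON parser about constants, keyed documents and function-call syntax. Since internal/json cannot be imported from outside mgo, the sketch below is written as if it lived in this package; it only exercises the methods defined above:

```go
package json

import "bytes"

// flag is a sample type used only by this sketch.
type flag bool

// newRelaxedExt is a sketch (not part of the removed file) that builds an
// Extension the same way bson/json.go's init does: relaxed syntax plus a
// decode-time constant and a custom encoder for the sample type.
func newRelaxedExt() *Extension {
	var ext Extension

	// Accept {title: "sc",} in place of strict {"title": "sc"}.
	ext.DecodeUnquotedKeys(true)
	ext.DecodeTrailingCommas(true)

	// The bare word "unset" decodes to flag(false).
	ext.DecodeConst("unset", flag(false))

	// flag values are encoded through a custom function.
	ext.EncodeType(flag(false), func(v interface{}) ([]byte, error) {
		if v.(flag) {
			return []byte("true"), nil
		}
		return []byte(`"unset"`), nil
	})
	return &ext
}

// decodeWith applies ext while decoding data into v.
func decodeWith(ext *Extension, data []byte, v interface{}) error {
	d := NewDecoder(bytes.NewReader(data))
	d.Extend(ext)
	return d.Decode(v)
}
```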
143 vendor/github.com/globalsign/mgo/internal/json/fold.go (generated, vendored)
@@ -1,143 +0,0 @@
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package json
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
caseMask = ^byte(0x20) // Mask to ignore case in ASCII.
|
|
||||||
kelvin = '\u212a'
|
|
||||||
smallLongEss = '\u017f'
|
|
||||||
)
|
|
||||||
|
|
||||||
// foldFunc returns one of four different case folding equivalence
|
|
||||||
// functions, from most general (and slow) to fastest:
|
|
||||||
//
|
|
||||||
// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8
|
|
||||||
// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S')
|
|
||||||
// 3) asciiEqualFold, no special, but includes non-letters (including _)
|
|
||||||
// 4) simpleLetterEqualFold, no specials, no non-letters.
|
|
||||||
//
|
|
||||||
// The letters S and K are special because they map to 3 runes, not just 2:
|
|
||||||
// * S maps to s and to U+017F 'ſ' Latin small letter long s
|
|
||||||
// * k maps to K and to U+212A 'K' Kelvin sign
|
|
||||||
// See https://play.golang.org/p/tTxjOc0OGo
|
|
||||||
//
|
|
||||||
// The returned function is specialized for matching against s and
|
|
||||||
// should only be given s. It's not curried for performance reasons.
|
|
||||||
func foldFunc(s []byte) func(s, t []byte) bool {
|
|
||||||
nonLetter := false
|
|
||||||
special := false // special letter
|
|
||||||
for _, b := range s {
|
|
||||||
if b >= utf8.RuneSelf {
|
|
||||||
return bytes.EqualFold
|
|
||||||
}
|
|
||||||
upper := b & caseMask
|
|
||||||
if upper < 'A' || upper > 'Z' {
|
|
||||||
nonLetter = true
|
|
||||||
} else if upper == 'K' || upper == 'S' {
|
|
||||||
// See above for why these letters are special.
|
|
||||||
special = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if special {
|
|
||||||
return equalFoldRight
|
|
||||||
}
|
|
||||||
if nonLetter {
|
|
||||||
return asciiEqualFold
|
|
||||||
}
|
|
||||||
return simpleLetterEqualFold
|
|
||||||
}
|
|
||||||
|
|
||||||
// equalFoldRight is a specialization of bytes.EqualFold when s is
|
|
||||||
// known to be all ASCII (including punctuation), but contains an 's',
|
|
||||||
// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t.
|
|
||||||
// See comments on foldFunc.
|
|
||||||
func equalFoldRight(s, t []byte) bool {
|
|
||||||
for _, sb := range s {
|
|
||||||
if len(t) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
tb := t[0]
|
|
||||||
if tb < utf8.RuneSelf {
|
|
||||||
if sb != tb {
|
|
||||||
sbUpper := sb & caseMask
|
|
||||||
if 'A' <= sbUpper && sbUpper <= 'Z' {
|
|
||||||
if sbUpper != tb&caseMask {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
t = t[1:]
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// sb is ASCII and t is not. t must be either kelvin
|
|
||||||
// sign or long s; sb must be s, S, k, or K.
|
|
||||||
tr, size := utf8.DecodeRune(t)
|
|
||||||
switch sb {
|
|
||||||
case 's', 'S':
|
|
||||||
if tr != smallLongEss {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
case 'k', 'K':
|
|
||||||
if tr != kelvin {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
t = t[size:]
|
|
||||||
|
|
||||||
}
|
|
||||||
if len(t) > 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// asciiEqualFold is a specialization of bytes.EqualFold for use when
|
|
||||||
// s is all ASCII (but may contain non-letters) and contains no
|
|
||||||
// special-folding letters.
|
|
||||||
// See comments on foldFunc.
|
|
||||||
func asciiEqualFold(s, t []byte) bool {
|
|
||||||
if len(s) != len(t) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for i, sb := range s {
|
|
||||||
tb := t[i]
|
|
||||||
if sb == tb {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') {
|
|
||||||
if sb&caseMask != tb&caseMask {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// simpleLetterEqualFold is a specialization of bytes.EqualFold for
|
|
||||||
// use when s is all ASCII letters (no underscores, etc) and also
|
|
||||||
// doesn't contain 'k', 'K', 's', or 'S'.
|
|
||||||
// See comments on foldFunc.
|
|
||||||
func simpleLetterEqualFold(s, t []byte) bool {
|
|
||||||
if len(s) != len(t) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for i, b := range s {
|
|
||||||
if b&caseMask != t[i]&caseMask {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
141 vendor/github.com/globalsign/mgo/internal/json/indent.go (generated, vendored)
@@ -1,141 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package json
|
|
||||||
|
|
||||||
import "bytes"
|
|
||||||
|
|
||||||
// Compact appends to dst the JSON-encoded src with
|
|
||||||
// insignificant space characters elided.
|
|
||||||
func Compact(dst *bytes.Buffer, src []byte) error {
|
|
||||||
return compact(dst, src, false)
|
|
||||||
}
|
|
||||||
|
|
||||||
func compact(dst *bytes.Buffer, src []byte, escape bool) error {
|
|
||||||
origLen := dst.Len()
|
|
||||||
var scan scanner
|
|
||||||
scan.reset()
|
|
||||||
start := 0
|
|
||||||
for i, c := range src {
|
|
||||||
if escape && (c == '<' || c == '>' || c == '&') {
|
|
||||||
if start < i {
|
|
||||||
dst.Write(src[start:i])
|
|
||||||
}
|
|
||||||
dst.WriteString(`\u00`)
|
|
||||||
dst.WriteByte(hex[c>>4])
|
|
||||||
dst.WriteByte(hex[c&0xF])
|
|
||||||
start = i + 1
|
|
||||||
}
|
|
||||||
// Convert U+2028 and U+2029 (E2 80 A8 and E2 80 A9).
|
|
||||||
if c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8 {
|
|
||||||
if start < i {
|
|
||||||
dst.Write(src[start:i])
|
|
||||||
}
|
|
||||||
dst.WriteString(`\u202`)
|
|
||||||
dst.WriteByte(hex[src[i+2]&0xF])
|
|
||||||
start = i + 3
|
|
||||||
}
|
|
||||||
v := scan.step(&scan, c)
|
|
||||||
if v >= scanSkipSpace {
|
|
||||||
if v == scanError {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if start < i {
|
|
||||||
dst.Write(src[start:i])
|
|
||||||
}
|
|
||||||
start = i + 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if scan.eof() == scanError {
|
|
||||||
dst.Truncate(origLen)
|
|
||||||
return scan.err
|
|
||||||
}
|
|
||||||
if start < len(src) {
|
|
||||||
dst.Write(src[start:])
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func newline(dst *bytes.Buffer, prefix, indent string, depth int) {
|
|
||||||
dst.WriteByte('\n')
|
|
||||||
dst.WriteString(prefix)
|
|
||||||
for i := 0; i < depth; i++ {
|
|
||||||
dst.WriteString(indent)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Indent appends to dst an indented form of the JSON-encoded src.
|
|
||||||
// Each element in a JSON object or array begins on a new,
|
|
||||||
// indented line beginning with prefix followed by one or more
|
|
||||||
// copies of indent according to the indentation nesting.
|
|
||||||
// The data appended to dst does not begin with the prefix nor
|
|
||||||
// any indentation, to make it easier to embed inside other formatted JSON data.
|
|
||||||
// Although leading space characters (space, tab, carriage return, newline)
|
|
||||||
// at the beginning of src are dropped, trailing space characters
|
|
||||||
// at the end of src are preserved and copied to dst.
|
|
||||||
// For example, if src has no trailing spaces, neither will dst;
|
|
||||||
// if src ends in a trailing newline, so will dst.
|
|
||||||
func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
|
|
||||||
origLen := dst.Len()
|
|
||||||
var scan scanner
|
|
||||||
scan.reset()
|
|
||||||
needIndent := false
|
|
||||||
depth := 0
|
|
||||||
for _, c := range src {
|
|
||||||
scan.bytes++
|
|
||||||
v := scan.step(&scan, c)
|
|
||||||
if v == scanSkipSpace {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if v == scanError {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if needIndent && v != scanEndObject && v != scanEndArray {
|
|
||||||
needIndent = false
|
|
||||||
depth++
|
|
||||||
newline(dst, prefix, indent, depth)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Emit semantically uninteresting bytes
|
|
||||||
// (in particular, punctuation in strings) unmodified.
|
|
||||||
if v == scanContinue {
|
|
||||||
dst.WriteByte(c)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add spacing around real punctuation.
|
|
||||||
switch c {
|
|
||||||
case '{', '[':
|
|
||||||
// delay indent so that empty object and array are formatted as {} and [].
|
|
||||||
needIndent = true
|
|
||||||
dst.WriteByte(c)
|
|
||||||
|
|
||||||
case ',':
|
|
||||||
dst.WriteByte(c)
|
|
||||||
newline(dst, prefix, indent, depth)
|
|
||||||
|
|
||||||
case ':':
|
|
||||||
dst.WriteByte(c)
|
|
||||||
dst.WriteByte(' ')
|
|
||||||
|
|
||||||
case '}', ']':
|
|
||||||
if needIndent {
|
|
||||||
// suppress indent in empty object/array
|
|
||||||
needIndent = false
|
|
||||||
} else {
|
|
||||||
depth--
|
|
||||||
newline(dst, prefix, indent, depth)
|
|
||||||
}
|
|
||||||
dst.WriteByte(c)
|
|
||||||
|
|
||||||
default:
|
|
||||||
dst.WriteByte(c)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if scan.eof() == scanError {
|
|
||||||
dst.Truncate(origLen)
|
|
||||||
return scan.err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
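Compact and Indent keep the signatures of their counterparts in the standard library's encoding/json, from which this internal package is forked (note the Go Authors copyright above). A small in-package sketch of the two helpers:

```go
package json

import (
	"bytes"
	"fmt"
)

// reformat is a sketch (not part of the removed file): Compact strips
// insignificant whitespace from src, then Indent re-renders the compacted
// value with two-space indentation.
func reformat(src []byte) error {
	var compacted, indented bytes.Buffer

	if err := Compact(&compacted, src); err != nil {
		return err
	}
	if err := Indent(&indented, compacted.Bytes(), "", "  "); err != nil {
		return err
	}

	fmt.Println(compacted.String())
	fmt.Println(indented.String())
	return nil
}
```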
697 vendor/github.com/globalsign/mgo/internal/json/scanner.go (generated, vendored)
@@ -1,697 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package json
|
|
||||||
|
|
||||||
// JSON value parser state machine.
|
|
||||||
// Just about at the limit of what is reasonable to write by hand.
|
|
||||||
// Some parts are a bit tedious, but overall it nicely factors out the
|
|
||||||
// otherwise common code from the multiple scanning functions
|
|
||||||
// in this package (Compact, Indent, checkValid, nextValue, etc).
|
|
||||||
//
|
|
||||||
// This file starts with two simple examples using the scanner
|
|
||||||
// before diving into the scanner itself.
|
|
||||||
|
|
||||||
import "strconv"
|
|
||||||
|
|
||||||
// checkValid verifies that data is valid JSON-encoded data.
|
|
||||||
// scan is passed in for use by checkValid to avoid an allocation.
|
|
||||||
func checkValid(data []byte, scan *scanner) error {
|
|
||||||
scan.reset()
|
|
||||||
for _, c := range data {
|
|
||||||
scan.bytes++
|
|
||||||
if scan.step(scan, c) == scanError {
|
|
||||||
return scan.err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if scan.eof() == scanError {
|
|
||||||
return scan.err
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// nextValue splits data after the next whole JSON value,
|
|
||||||
// returning that value and the bytes that follow it as separate slices.
|
|
||||||
// scan is passed in for use by nextValue to avoid an allocation.
|
|
||||||
func nextValue(data []byte, scan *scanner) (value, rest []byte, err error) {
|
|
||||||
scan.reset()
|
|
||||||
for i, c := range data {
|
|
||||||
v := scan.step(scan, c)
|
|
||||||
if v >= scanEndObject {
|
|
||||||
switch v {
|
|
||||||
// probe the scanner with a space to determine whether we will
|
|
||||||
// get scanEnd on the next character. Otherwise, if the next character
|
|
||||||
// is not a space, scanEndTop allocates a needless error.
|
|
||||||
case scanEndObject, scanEndArray, scanEndParams:
|
|
||||||
if scan.step(scan, ' ') == scanEnd {
|
|
||||||
return data[:i+1], data[i+1:], nil
|
|
||||||
}
|
|
||||||
case scanError:
|
|
||||||
return nil, nil, scan.err
|
|
||||||
case scanEnd:
|
|
||||||
return data[:i], data[i:], nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if scan.eof() == scanError {
|
|
||||||
return nil, nil, scan.err
|
|
||||||
}
|
|
||||||
return data, nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// A SyntaxError is a description of a JSON syntax error.
|
|
||||||
type SyntaxError struct {
|
|
||||||
msg string // description of error
|
|
||||||
Offset int64 // error occurred after reading Offset bytes
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyntaxError) Error() string { return e.msg }
|
|
||||||
|
|
||||||
// A scanner is a JSON scanning state machine.
|
|
||||||
// Callers call scan.reset() and then pass bytes in one at a time
|
|
||||||
// by calling scan.step(&scan, c) for each byte.
|
|
||||||
// The return value, referred to as an opcode, tells the
|
|
||||||
// caller about significant parsing events like beginning
|
|
||||||
// and ending literals, objects, and arrays, so that the
|
|
||||||
// caller can follow along if it wishes.
|
|
||||||
// The return value scanEnd indicates that a single top-level
|
|
||||||
// JSON value has been completed, *before* the byte that
|
|
||||||
// just got passed in. (The indication must be delayed in order
|
|
||||||
// to recognize the end of numbers: is 123 a whole value or
|
|
||||||
// the beginning of 12345e+6?).
|
|
||||||
type scanner struct {
|
|
||||||
// The step is a func to be called to execute the next transition.
|
|
||||||
// Also tried using an integer constant and a single func
|
|
||||||
// with a switch, but using the func directly was 10% faster
|
|
||||||
// on a 64-bit Mac Mini, and it's nicer to read.
|
|
||||||
step func(*scanner, byte) int
|
|
||||||
|
|
||||||
// Reached end of top-level value.
|
|
||||||
endTop bool
|
|
||||||
|
|
||||||
// Stack of what we're in the middle of - array values, object keys, object values.
|
|
||||||
parseState []int
|
|
||||||
|
|
||||||
// Error that happened, if any.
|
|
||||||
err error
|
|
||||||
|
|
||||||
// 1-byte redo (see undo method)
|
|
||||||
redo bool
|
|
||||||
redoCode int
|
|
||||||
redoState func(*scanner, byte) int
|
|
||||||
|
|
||||||
// total bytes consumed, updated by decoder.Decode
|
|
||||||
bytes int64
|
|
||||||
}
|
|
||||||
|
|
||||||
// These values are returned by the state transition functions
|
|
||||||
// assigned to scanner.state and the method scanner.eof.
|
|
||||||
// They give details about the current state of the scan that
|
|
||||||
// callers might be interested to know about.
|
|
||||||
// It is okay to ignore the return value of any particular
|
|
||||||
// call to scanner.state: if one call returns scanError,
|
|
||||||
// every subsequent call will return scanError too.
|
|
||||||
const (
|
|
||||||
// Continue.
|
|
||||||
scanContinue = iota // uninteresting byte
|
|
||||||
scanBeginLiteral // end implied by next result != scanContinue
|
|
||||||
scanBeginObject // begin object
|
|
||||||
scanObjectKey // just finished object key (string)
|
|
||||||
scanObjectValue // just finished non-last object value
|
|
||||||
scanEndObject // end object (implies scanObjectValue if possible)
|
|
||||||
scanBeginArray // begin array
|
|
||||||
scanArrayValue // just finished array value
|
|
||||||
scanEndArray // end array (implies scanArrayValue if possible)
|
|
||||||
scanBeginName // begin function call
|
|
||||||
scanParam // begin function argument
|
|
||||||
scanEndParams // end function call
|
|
||||||
scanSkipSpace // space byte; can skip; known to be last "continue" result
|
|
||||||
|
|
||||||
// Stop.
|
|
||||||
scanEnd // top-level value ended *before* this byte; known to be first "stop" result
|
|
||||||
scanError // hit an error, scanner.err.
|
|
||||||
)
|
|
||||||
|
|
||||||
// These values are stored in the parseState stack.
|
|
||||||
// They give the current state of a composite value
|
|
||||||
// being scanned. If the parser is inside a nested value
|
|
||||||
// the parseState describes the nested state, outermost at entry 0.
|
|
||||||
const (
|
|
||||||
parseObjectKey = iota // parsing object key (before colon)
|
|
||||||
parseObjectValue // parsing object value (after colon)
|
|
||||||
parseArrayValue // parsing array value
|
|
||||||
parseName // parsing unquoted name
|
|
||||||
parseParam // parsing function argument value
|
|
||||||
)
|
|
||||||
|
|
||||||
// reset prepares the scanner for use.
|
|
||||||
// It must be called before calling s.step.
|
|
||||||
func (s *scanner) reset() {
|
|
||||||
s.step = stateBeginValue
|
|
||||||
s.parseState = s.parseState[0:0]
|
|
||||||
s.err = nil
|
|
||||||
s.redo = false
|
|
||||||
s.endTop = false
|
|
||||||
}
|
|
||||||
|
|
||||||
// eof tells the scanner that the end of input has been reached.
|
|
||||||
// It returns a scan status just as s.step does.
|
|
||||||
func (s *scanner) eof() int {
|
|
||||||
if s.err != nil {
|
|
||||||
return scanError
|
|
||||||
}
|
|
||||||
if s.endTop {
|
|
||||||
return scanEnd
|
|
||||||
}
|
|
||||||
s.step(s, ' ')
|
|
||||||
if s.endTop {
|
|
||||||
return scanEnd
|
|
||||||
}
|
|
||||||
if s.err == nil {
|
|
||||||
s.err = &SyntaxError{"unexpected end of JSON input", s.bytes}
|
|
||||||
}
|
|
||||||
return scanError
|
|
||||||
}
|
|
||||||
|
|
||||||
// pushParseState pushes a new parse state p onto the parse stack.
|
|
||||||
func (s *scanner) pushParseState(p int) {
|
|
||||||
s.parseState = append(s.parseState, p)
|
|
||||||
}
|
|
||||||
|
|
||||||
// popParseState pops a parse state (already obtained) off the stack
|
|
||||||
// and updates s.step accordingly.
|
|
||||||
func (s *scanner) popParseState() {
|
|
||||||
n := len(s.parseState) - 1
|
|
||||||
s.parseState = s.parseState[0:n]
|
|
||||||
s.redo = false
|
|
||||||
if n == 0 {
|
|
||||||
s.step = stateEndTop
|
|
||||||
s.endTop = true
|
|
||||||
} else {
|
|
||||||
s.step = stateEndValue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func isSpace(c byte) bool {
|
|
||||||
return c == ' ' || c == '\t' || c == '\r' || c == '\n'
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateBeginValueOrEmpty is the state after reading `[`.
|
|
||||||
func stateBeginValueOrEmpty(s *scanner, c byte) int {
|
|
||||||
if c <= ' ' && isSpace(c) {
|
|
||||||
return scanSkipSpace
|
|
||||||
}
|
|
||||||
if c == ']' {
|
|
||||||
return stateEndValue(s, c)
|
|
||||||
}
|
|
||||||
return stateBeginValue(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateBeginValue is the state at the beginning of the input.
|
|
||||||
func stateBeginValue(s *scanner, c byte) int {
|
|
||||||
if c <= ' ' && isSpace(c) {
|
|
||||||
return scanSkipSpace
|
|
||||||
}
|
|
||||||
switch c {
|
|
||||||
case '{':
|
|
||||||
s.step = stateBeginStringOrEmpty
|
|
||||||
s.pushParseState(parseObjectKey)
|
|
||||||
return scanBeginObject
|
|
||||||
case '[':
|
|
||||||
s.step = stateBeginValueOrEmpty
|
|
||||||
s.pushParseState(parseArrayValue)
|
|
||||||
return scanBeginArray
|
|
||||||
case '"':
|
|
||||||
s.step = stateInString
|
|
||||||
return scanBeginLiteral
|
|
||||||
case '-':
|
|
||||||
s.step = stateNeg
|
|
||||||
return scanBeginLiteral
|
|
||||||
case '0': // beginning of 0.123
|
|
||||||
s.step = state0
|
|
||||||
return scanBeginLiteral
|
|
||||||
case 'n':
|
|
||||||
s.step = stateNew0
|
|
||||||
return scanBeginName
|
|
||||||
}
|
|
||||||
if '1' <= c && c <= '9' { // beginning of 1234.5
|
|
||||||
s.step = state1
|
|
||||||
return scanBeginLiteral
|
|
||||||
}
|
|
||||||
if isName(c) {
|
|
||||||
s.step = stateName
|
|
||||||
return scanBeginName
|
|
||||||
}
|
|
||||||
return s.error(c, "looking for beginning of value")
|
|
||||||
}
|
|
||||||
|
|
||||||
func isName(c byte) bool {
|
|
||||||
return c == '$' || c == '_' || 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9'
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateBeginStringOrEmpty is the state after reading `{`.
|
|
||||||
func stateBeginStringOrEmpty(s *scanner, c byte) int {
|
|
||||||
if c <= ' ' && isSpace(c) {
|
|
||||||
return scanSkipSpace
|
|
||||||
}
|
|
||||||
if c == '}' {
|
|
||||||
n := len(s.parseState)
|
|
||||||
s.parseState[n-1] = parseObjectValue
|
|
||||||
return stateEndValue(s, c)
|
|
||||||
}
|
|
||||||
return stateBeginString(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateBeginString is the state after reading `{"key": value,`.
|
|
||||||
func stateBeginString(s *scanner, c byte) int {
|
|
||||||
if c <= ' ' && isSpace(c) {
|
|
||||||
return scanSkipSpace
|
|
||||||
}
|
|
||||||
if c == '"' {
|
|
||||||
s.step = stateInString
|
|
||||||
return scanBeginLiteral
|
|
||||||
}
|
|
||||||
if isName(c) {
|
|
||||||
s.step = stateName
|
|
||||||
return scanBeginName
|
|
||||||
}
|
|
||||||
return s.error(c, "looking for beginning of object key string")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateEndValue is the state after completing a value,
|
|
||||||
// such as after reading `{}` or `true` or `["x"`.
|
|
||||||
func stateEndValue(s *scanner, c byte) int {
|
|
||||||
n := len(s.parseState)
|
|
||||||
if n == 0 {
|
|
||||||
// Completed top-level before the current byte.
|
|
||||||
s.step = stateEndTop
|
|
||||||
s.endTop = true
|
|
||||||
return stateEndTop(s, c)
|
|
||||||
}
|
|
||||||
if c <= ' ' && isSpace(c) {
|
|
||||||
s.step = stateEndValue
|
|
||||||
return scanSkipSpace
|
|
||||||
}
|
|
||||||
ps := s.parseState[n-1]
|
|
||||||
switch ps {
|
|
||||||
case parseObjectKey:
|
|
||||||
if c == ':' {
|
|
||||||
s.parseState[n-1] = parseObjectValue
|
|
||||||
s.step = stateBeginValue
|
|
||||||
return scanObjectKey
|
|
||||||
}
|
|
||||||
return s.error(c, "after object key")
|
|
||||||
case parseObjectValue:
|
|
||||||
if c == ',' {
|
|
||||||
s.parseState[n-1] = parseObjectKey
|
|
||||||
s.step = stateBeginStringOrEmpty
|
|
||||||
return scanObjectValue
|
|
||||||
}
|
|
||||||
if c == '}' {
|
|
||||||
s.popParseState()
|
|
||||||
return scanEndObject
|
|
||||||
}
|
|
||||||
return s.error(c, "after object key:value pair")
|
|
||||||
case parseArrayValue:
|
|
||||||
if c == ',' {
|
|
||||||
s.step = stateBeginValueOrEmpty
|
|
||||||
return scanArrayValue
|
|
||||||
}
|
|
||||||
if c == ']' {
|
|
||||||
s.popParseState()
|
|
||||||
return scanEndArray
|
|
||||||
}
|
|
||||||
return s.error(c, "after array element")
|
|
||||||
case parseParam:
|
|
||||||
if c == ',' {
|
|
||||||
s.step = stateBeginValue
|
|
||||||
return scanParam
|
|
||||||
}
|
|
||||||
if c == ')' {
|
|
||||||
s.popParseState()
|
|
||||||
return scanEndParams
|
|
||||||
}
|
|
||||||
return s.error(c, "after array element")
|
|
||||||
}
|
|
||||||
return s.error(c, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateEndTop is the state after finishing the top-level value,
|
|
||||||
// such as after reading `{}` or `[1,2,3]`.
|
|
||||||
// Only space characters should be seen now.
|
|
||||||
func stateEndTop(s *scanner, c byte) int {
|
|
||||||
if c != ' ' && c != '\t' && c != '\r' && c != '\n' {
|
|
||||||
// Complain about non-space byte on next call.
|
|
||||||
s.error(c, "after top-level value")
|
|
||||||
}
|
|
||||||
return scanEnd
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateInString is the state after reading `"`.
|
|
||||||
func stateInString(s *scanner, c byte) int {
|
|
||||||
if c == '"' {
|
|
||||||
s.step = stateEndValue
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
if c == '\\' {
|
|
||||||
s.step = stateInStringEsc
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
if c < 0x20 {
|
|
||||||
return s.error(c, "in string literal")
|
|
||||||
}
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateInStringEsc is the state after reading `"\` during a quoted string.
|
|
||||||
func stateInStringEsc(s *scanner, c byte) int {
|
|
||||||
switch c {
|
|
||||||
case 'b', 'f', 'n', 'r', 't', '\\', '/', '"':
|
|
||||||
s.step = stateInString
|
|
||||||
return scanContinue
|
|
||||||
case 'u':
|
|
||||||
s.step = stateInStringEscU
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in string escape code")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateInStringEscU is the state after reading `"\u` during a quoted string.
|
|
||||||
func stateInStringEscU(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
|
||||||
s.step = stateInStringEscU1
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
// numbers
|
|
||||||
return s.error(c, "in \\u hexadecimal character escape")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateInStringEscU1 is the state after reading `"\u1` during a quoted string.
|
|
||||||
func stateInStringEscU1(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
|
||||||
s.step = stateInStringEscU12
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
// numbers
|
|
||||||
return s.error(c, "in \\u hexadecimal character escape")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateInStringEscU12 is the state after reading `"\u12` during a quoted string.
|
|
||||||
func stateInStringEscU12(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
|
||||||
s.step = stateInStringEscU123
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
// numbers
|
|
||||||
return s.error(c, "in \\u hexadecimal character escape")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateInStringEscU123 is the state after reading `"\u123` during a quoted string.
|
|
||||||
func stateInStringEscU123(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' || 'a' <= c && c <= 'f' || 'A' <= c && c <= 'F' {
|
|
||||||
s.step = stateInString
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
// numbers
|
|
||||||
return s.error(c, "in \\u hexadecimal character escape")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateNeg is the state after reading `-` during a number.
|
|
||||||
func stateNeg(s *scanner, c byte) int {
|
|
||||||
if c == '0' {
|
|
||||||
s.step = state0
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
if '1' <= c && c <= '9' {
|
|
||||||
s.step = state1
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in numeric literal")
|
|
||||||
}
|
|
||||||
|
|
||||||
// state1 is the state after reading a non-zero integer during a number,
|
|
||||||
// such as after reading `1` or `100` but not `0`.
|
|
||||||
func state1(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' {
|
|
||||||
s.step = state1
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return state0(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// state0 is the state after reading `0` during a number.
|
|
||||||
func state0(s *scanner, c byte) int {
|
|
||||||
if c == '.' {
|
|
||||||
s.step = stateDot
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
if c == 'e' || c == 'E' {
|
|
||||||
s.step = stateE
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return stateEndValue(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateDot is the state after reading the integer and decimal point in a number,
|
|
||||||
// such as after reading `1.`.
|
|
||||||
func stateDot(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' {
|
|
||||||
s.step = stateDot0
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "after decimal point in numeric literal")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateDot0 is the state after reading the integer, decimal point, and subsequent
|
|
||||||
// digits of a number, such as after reading `3.14`.
|
|
||||||
func stateDot0(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' {
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
if c == 'e' || c == 'E' {
|
|
||||||
s.step = stateE
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return stateEndValue(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateE is the state after reading the mantissa and e in a number,
|
|
||||||
// such as after reading `314e` or `0.314e`.
|
|
||||||
func stateE(s *scanner, c byte) int {
|
|
||||||
if c == '+' || c == '-' {
|
|
||||||
s.step = stateESign
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return stateESign(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateESign is the state after reading the mantissa, e, and sign in a number,
|
|
||||||
// such as after reading `314e-` or `0.314e+`.
|
|
||||||
func stateESign(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' {
|
|
||||||
s.step = stateE0
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in exponent of numeric literal")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateE0 is the state after reading the mantissa, e, optional sign,
|
|
||||||
// and at least one digit of the exponent in a number,
|
|
||||||
// such as after reading `314e-2` or `0.314e+1` or `3.14e0`.
|
|
||||||
func stateE0(s *scanner, c byte) int {
|
|
||||||
if '0' <= c && c <= '9' {
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return stateEndValue(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateNew0 is the state after reading `n`.
|
|
||||||
func stateNew0(s *scanner, c byte) int {
|
|
||||||
if c == 'e' {
|
|
||||||
s.step = stateNew1
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
s.step = stateName
|
|
||||||
return stateName(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateNew1 is the state after reading `ne`.
|
|
||||||
func stateNew1(s *scanner, c byte) int {
|
|
||||||
if c == 'w' {
|
|
||||||
s.step = stateNew2
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
s.step = stateName
|
|
||||||
return stateName(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateNew2 is the state after reading `new`.
|
|
||||||
func stateNew2(s *scanner, c byte) int {
|
|
||||||
s.step = stateName
|
|
||||||
if c == ' ' {
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return stateName(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateName is the state while reading an unquoted function name.
|
|
||||||
func stateName(s *scanner, c byte) int {
|
|
||||||
if isName(c) {
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
if c == '(' {
|
|
||||||
s.step = stateParamOrEmpty
|
|
||||||
s.pushParseState(parseParam)
|
|
||||||
return scanParam
|
|
||||||
}
|
|
||||||
return stateEndValue(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateParamOrEmpty is the state after reading `(`.
|
|
||||||
func stateParamOrEmpty(s *scanner, c byte) int {
|
|
||||||
if c <= ' ' && isSpace(c) {
|
|
||||||
return scanSkipSpace
|
|
||||||
}
|
|
||||||
if c == ')' {
|
|
||||||
return stateEndValue(s, c)
|
|
||||||
}
|
|
||||||
return stateBeginValue(s, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateT is the state after reading `t`.
|
|
||||||
func stateT(s *scanner, c byte) int {
|
|
||||||
if c == 'r' {
|
|
||||||
s.step = stateTr
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal true (expecting 'r')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateTr is the state after reading `tr`.
|
|
||||||
func stateTr(s *scanner, c byte) int {
|
|
||||||
if c == 'u' {
|
|
||||||
s.step = stateTru
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal true (expecting 'u')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateTru is the state after reading `tru`.
|
|
||||||
func stateTru(s *scanner, c byte) int {
|
|
||||||
if c == 'e' {
|
|
||||||
s.step = stateEndValue
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal true (expecting 'e')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateF is the state after reading `f`.
|
|
||||||
func stateF(s *scanner, c byte) int {
|
|
||||||
if c == 'a' {
|
|
||||||
s.step = stateFa
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal false (expecting 'a')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateFa is the state after reading `fa`.
|
|
||||||
func stateFa(s *scanner, c byte) int {
|
|
||||||
if c == 'l' {
|
|
||||||
s.step = stateFal
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal false (expecting 'l')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateFal is the state after reading `fal`.
|
|
||||||
func stateFal(s *scanner, c byte) int {
|
|
||||||
if c == 's' {
|
|
||||||
s.step = stateFals
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal false (expecting 's')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateFals is the state after reading `fals`.
|
|
||||||
func stateFals(s *scanner, c byte) int {
|
|
||||||
if c == 'e' {
|
|
||||||
s.step = stateEndValue
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal false (expecting 'e')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateN is the state after reading `n`.
|
|
||||||
func stateN(s *scanner, c byte) int {
|
|
||||||
if c == 'u' {
|
|
||||||
s.step = stateNu
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal null (expecting 'u')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateNu is the state after reading `nu`.
|
|
||||||
func stateNu(s *scanner, c byte) int {
|
|
||||||
if c == 'l' {
|
|
||||||
s.step = stateNul
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal null (expecting 'l')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateNul is the state after reading `nul`.
|
|
||||||
func stateNul(s *scanner, c byte) int {
|
|
||||||
if c == 'l' {
|
|
||||||
s.step = stateEndValue
|
|
||||||
return scanContinue
|
|
||||||
}
|
|
||||||
return s.error(c, "in literal null (expecting 'l')")
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateError is the state after reaching a syntax error,
|
|
||||||
// such as after reading `[1}` or `5.1.2`.
|
|
||||||
func stateError(s *scanner, c byte) int {
|
|
||||||
return scanError
|
|
||||||
}
|
|
||||||
|
|
||||||
// error records an error and switches to the error state.
|
|
||||||
func (s *scanner) error(c byte, context string) int {
|
|
||||||
s.step = stateError
|
|
||||||
s.err = &SyntaxError{"invalid character " + quoteChar(c) + " " + context, s.bytes}
|
|
||||||
return scanError
|
|
||||||
}
|
|
||||||
|
|
||||||
// quoteChar formats c as a quoted character literal
|
|
||||||
func quoteChar(c byte) string {
|
|
||||||
// special cases - different from quoted strings
|
|
||||||
if c == '\'' {
|
|
||||||
return `'\''`
|
|
||||||
}
|
|
||||||
if c == '"' {
|
|
||||||
return `'"'`
|
|
||||||
}
|
|
||||||
|
|
||||||
// use quoted string with different quotation marks
|
|
||||||
s := strconv.Quote(string(c))
|
|
||||||
return "'" + s[1:len(s)-1] + "'"
|
|
||||||
}
|
|
||||||
|
|
||||||
// undo causes the scanner to return scanCode from the next state transition.
|
|
||||||
// This gives callers a simple 1-byte undo mechanism.
|
|
||||||
func (s *scanner) undo(scanCode int) {
|
|
||||||
if s.redo {
|
|
||||||
panic("json: invalid use of scanner")
|
|
||||||
}
|
|
||||||
s.redoCode = scanCode
|
|
||||||
s.redoState = s.step
|
|
||||||
s.step = stateRedo
|
|
||||||
s.redo = true
|
|
||||||
}
|
|
||||||
|
|
||||||
// stateRedo helps implement the scanner's 1-byte undo.
|
|
||||||
func stateRedo(s *scanner, c byte) int {
|
|
||||||
s.redo = false
|
|
||||||
s.step = s.redoState
|
|
||||||
return s.redoCode
|
|
||||||
}
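The scanner above is a push state machine: callers reset it, feed it one byte at a time through scan.step and react to the returned opcodes, exactly as checkValid and nextValue do at the top of this file. A small sketch that drives it directly:

```go
package json

// countStrings is a sketch (not part of the removed file). It feeds data
// byte by byte to the scanner and counts how many string literals start
// (object keys included), returning the scanner's error on invalid input.
func countStrings(data []byte) (int, error) {
	var scan scanner
	scan.reset()

	n := 0
	for _, c := range data {
		scan.bytes++
		switch scan.step(&scan, c) {
		case scanBeginLiteral:
			if c == '"' {
				n++
			}
		case scanError:
			return 0, scan.err
		}
	}
	if scan.eof() == scanError {
		return 0, scan.err
	}
	return n, nil
}
```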
510 vendor/github.com/globalsign/mgo/internal/json/stream.go (generated, vendored)
@@ -1,510 +0,0 @@
// Copyright 2010 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
package json
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"errors"
|
|
||||||
"io"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A Decoder reads and decodes JSON values from an input stream.
|
|
||||||
type Decoder struct {
|
|
||||||
r io.Reader
|
|
||||||
buf []byte
|
|
||||||
d decodeState
|
|
||||||
scanp int // start of unread data in buf
|
|
||||||
scan scanner
|
|
||||||
err error
|
|
||||||
|
|
||||||
tokenState int
|
|
||||||
tokenStack []int
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewDecoder returns a new decoder that reads from r.
|
|
||||||
//
|
|
||||||
// The decoder introduces its own buffering and may
|
|
||||||
// read data from r beyond the JSON values requested.
|
|
||||||
func NewDecoder(r io.Reader) *Decoder {
|
|
||||||
return &Decoder{r: r}
|
|
||||||
}
|
|
||||||
|
|
||||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
|
||||||
// Number instead of as a float64.
|
|
||||||
func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
|
|
||||||
|
|
||||||
// Decode reads the next JSON-encoded value from its
|
|
||||||
// input and stores it in the value pointed to by v.
|
|
||||||
//
|
|
||||||
// See the documentation for Unmarshal for details about
|
|
||||||
// the conversion of JSON into a Go value.
|
|
||||||
func (dec *Decoder) Decode(v interface{}) error {
|
|
||||||
if dec.err != nil {
|
|
||||||
return dec.err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := dec.tokenPrepareForDecode(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if !dec.tokenValueAllowed() {
|
|
||||||
return &SyntaxError{msg: "not at beginning of value"}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read whole value into buffer.
|
|
||||||
n, err := dec.readValue()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
dec.d.init(dec.buf[dec.scanp : dec.scanp+n])
|
|
||||||
dec.scanp += n
|
|
||||||
|
|
||||||
// Don't save err from unmarshal into dec.err:
|
|
||||||
// the connection is still usable since we read a complete JSON
|
|
||||||
// object from it before the error happened.
|
|
||||||
err = dec.d.unmarshal(v)
|
|
||||||
|
|
||||||
// fixup token streaming state
|
|
||||||
dec.tokenValueEnd()
|
|
||||||
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Buffered returns a reader of the data remaining in the Decoder's
|
|
||||||
// buffer. The reader is valid until the next call to Decode.
|
|
||||||
func (dec *Decoder) Buffered() io.Reader {
|
|
||||||
return bytes.NewReader(dec.buf[dec.scanp:])
|
|
||||||
}
|
|
||||||
|
|
||||||
// readValue reads a JSON value into dec.buf.
|
|
||||||
// It returns the length of the encoding.
|
|
||||||
func (dec *Decoder) readValue() (int, error) {
|
|
||||||
dec.scan.reset()
|
|
||||||
|
|
||||||
scanp := dec.scanp
|
|
||||||
var err error
|
|
||||||
Input:
|
|
||||||
for {
|
|
||||||
// Look in the buffer for a new value.
|
|
||||||
for i, c := range dec.buf[scanp:] {
|
|
||||||
dec.scan.bytes++
|
|
||||||
v := dec.scan.step(&dec.scan, c)
|
|
||||||
if v == scanEnd {
|
|
||||||
scanp += i
|
|
||||||
break Input
|
|
||||||
}
|
|
||||||
// scanEnd is delayed one byte.
|
|
||||||
// We might block trying to get that byte from src,
|
|
||||||
// so instead invent a space byte.
|
|
||||||
if (v == scanEndObject || v == scanEndArray) && dec.scan.step(&dec.scan, ' ') == scanEnd {
|
|
||||||
scanp += i + 1
|
|
||||||
break Input
|
|
||||||
}
|
|
||||||
if v == scanError {
|
|
||||||
dec.err = dec.scan.err
|
|
||||||
return 0, dec.scan.err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
scanp = len(dec.buf)
|
|
||||||
|
|
||||||
// Did the last read have an error?
|
|
||||||
// Delayed until now to allow buffer scan.
|
|
||||||
if err != nil {
|
|
||||||
if err == io.EOF {
|
|
||||||
if dec.scan.step(&dec.scan, ' ') == scanEnd {
|
|
||||||
break Input
|
|
||||||
}
|
|
||||||
if nonSpace(dec.buf) {
|
|
||||||
err = io.ErrUnexpectedEOF
|
|
||||||
}
|
|
||||||
}
|
|
||||||
dec.err = err
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
|
|
||||||
n := scanp - dec.scanp
|
|
||||||
err = dec.refill()
|
|
||||||
scanp = dec.scanp + n
|
|
||||||
}
|
|
||||||
return scanp - dec.scanp, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dec *Decoder) refill() error {
|
|
||||||
// Make room to read more into the buffer.
|
|
||||||
// First slide down data already consumed.
|
|
||||||
if dec.scanp > 0 {
|
|
||||||
n := copy(dec.buf, dec.buf[dec.scanp:])
|
|
||||||
dec.buf = dec.buf[:n]
|
|
||||||
dec.scanp = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
// Grow buffer if not large enough.
|
|
||||||
const minRead = 512
|
|
||||||
if cap(dec.buf)-len(dec.buf) < minRead {
|
|
||||||
newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
|
|
||||||
copy(newBuf, dec.buf)
|
|
||||||
dec.buf = newBuf
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read. Delay error for next iteration (after scan).
|
|
||||||
n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
|
|
||||||
dec.buf = dec.buf[0 : len(dec.buf)+n]
|
|
||||||
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func nonSpace(b []byte) bool {
|
|
||||||
for _, c := range b {
|
|
||||||
if !isSpace(c) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// An Encoder writes JSON values to an output stream.
|
|
||||||
type Encoder struct {
|
|
||||||
w io.Writer
|
|
||||||
err error
|
|
||||||
escapeHTML bool
|
|
||||||
|
|
||||||
indentBuf *bytes.Buffer
|
|
||||||
indentPrefix string
|
|
||||||
indentValue string
|
|
||||||
|
|
||||||
ext Extension
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewEncoder returns a new encoder that writes to w.
|
|
||||||
func NewEncoder(w io.Writer) *Encoder {
|
|
||||||
return &Encoder{w: w, escapeHTML: true}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Encode writes the JSON encoding of v to the stream,
|
|
||||||
// followed by a newline character.
|
|
||||||
//
|
|
||||||
// See the documentation for Marshal for details about the
|
|
||||||
// conversion of Go values to JSON.
|
|
||||||
func (enc *Encoder) Encode(v interface{}) error {
|
|
||||||
if enc.err != nil {
|
|
||||||
return enc.err
|
|
||||||
}
|
|
||||||
e := newEncodeState()
|
|
||||||
e.ext = enc.ext
|
|
||||||
err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Terminate each value with a newline.
|
|
||||||
// This makes the output look a little nicer
|
|
||||||
// when debugging, and some kind of space
|
|
||||||
// is required if the encoded value was a number,
|
|
||||||
// so that the reader knows there aren't more
|
|
||||||
// digits coming.
|
|
||||||
e.WriteByte('\n')
|
|
||||||
|
|
||||||
b := e.Bytes()
|
|
||||||
if enc.indentBuf != nil {
|
|
||||||
enc.indentBuf.Reset()
|
|
||||||
err = Indent(enc.indentBuf, b, enc.indentPrefix, enc.indentValue)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
b = enc.indentBuf.Bytes()
|
|
||||||
}
|
|
||||||
if _, err = enc.w.Write(b); err != nil {
|
|
||||||
enc.err = err
|
|
||||||
}
|
|
||||||
encodeStatePool.Put(e)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Indent sets the encoder to format each encoded value with Indent.
|
|
||||||
func (enc *Encoder) Indent(prefix, indent string) {
|
|
||||||
enc.indentBuf = new(bytes.Buffer)
|
|
||||||
enc.indentPrefix = prefix
|
|
||||||
enc.indentValue = indent
|
|
||||||
}
|
|
||||||
|
|
||||||
// DisableHTMLEscaping causes the encoder not to escape angle brackets
|
|
||||||
// ("<" and ">") or ampersands ("&") in JSON strings.
|
|
||||||
func (enc *Encoder) DisableHTMLEscaping() {
|
|
||||||
enc.escapeHTML = false
|
|
||||||
}
|
|
||||||
|
|
||||||
// RawMessage is a raw encoded JSON value.
|
|
||||||
// It implements Marshaler and Unmarshaler and can
|
|
||||||
// be used to delay JSON decoding or precompute a JSON encoding.
|
|
||||||
type RawMessage []byte
|
|
||||||
|
|
||||||
// MarshalJSON returns *m as the JSON encoding of m.
|
|
||||||
func (m *RawMessage) MarshalJSON() ([]byte, error) {
|
|
||||||
return *m, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// UnmarshalJSON sets *m to a copy of data.
|
|
||||||
func (m *RawMessage) UnmarshalJSON(data []byte) error {
|
|
||||||
if m == nil {
|
|
||||||
return errors.New("json.RawMessage: UnmarshalJSON on nil pointer")
|
|
||||||
}
|
|
||||||
*m = append((*m)[0:0], data...)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ Marshaler = (*RawMessage)(nil)
|
|
||||||
var _ Unmarshaler = (*RawMessage)(nil)
|
|
||||||
|
|
||||||
// A Token holds a value of one of these types:
|
|
||||||
//
|
|
||||||
// Delim, for the four JSON delimiters [ ] { }
|
|
||||||
// bool, for JSON booleans
|
|
||||||
// float64, for JSON numbers
|
|
||||||
// Number, for JSON numbers
|
|
||||||
// string, for JSON string literals
|
|
||||||
// nil, for JSON null
|
|
||||||
//
|
|
||||||
type Token interface{}
|
|
||||||
|
|
||||||
const (
|
|
||||||
tokenTopValue = iota
|
|
||||||
tokenArrayStart
|
|
||||||
tokenArrayValue
|
|
||||||
tokenArrayComma
|
|
||||||
tokenObjectStart
|
|
||||||
tokenObjectKey
|
|
||||||
tokenObjectColon
|
|
||||||
tokenObjectValue
|
|
||||||
tokenObjectComma
|
|
||||||
)
|
|
||||||
|
|
||||||
// advance tokenstate from a separator state to a value state
|
|
||||||
func (dec *Decoder) tokenPrepareForDecode() error {
|
|
||||||
// Note: Not calling peek before switch, to avoid
|
|
||||||
// putting peek into the standard Decode path.
|
|
||||||
// peek is only called when using the Token API.
|
|
||||||
switch dec.tokenState {
|
|
||||||
case tokenArrayComma:
|
|
||||||
c, err := dec.peek()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if c != ',' {
|
|
||||||
return &SyntaxError{"expected comma after array element", 0}
|
|
||||||
}
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenState = tokenArrayValue
|
|
||||||
case tokenObjectColon:
|
|
||||||
c, err := dec.peek()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if c != ':' {
|
|
||||||
return &SyntaxError{"expected colon after object key", 0}
|
|
||||||
}
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenState = tokenObjectValue
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dec *Decoder) tokenValueAllowed() bool {
|
|
||||||
switch dec.tokenState {
|
|
||||||
case tokenTopValue, tokenArrayStart, tokenArrayValue, tokenObjectValue:
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dec *Decoder) tokenValueEnd() {
|
|
||||||
switch dec.tokenState {
|
|
||||||
case tokenArrayStart, tokenArrayValue:
|
|
||||||
dec.tokenState = tokenArrayComma
|
|
||||||
case tokenObjectValue:
|
|
||||||
dec.tokenState = tokenObjectComma
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// A Delim is a JSON array or object delimiter, one of [ ] { or }.
|
|
||||||
type Delim rune
|
|
||||||
|
|
||||||
func (d Delim) String() string {
|
|
||||||
return string(d)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Token returns the next JSON token in the input stream.
|
|
||||||
// At the end of the input stream, Token returns nil, io.EOF.
|
|
||||||
//
|
|
||||||
// Token guarantees that the delimiters [ ] { } it returns are
|
|
||||||
// properly nested and matched: if Token encounters an unexpected
|
|
||||||
// delimiter in the input, it will return an error.
|
|
||||||
//
|
|
||||||
// The input stream consists of basic JSON values—bool, string,
|
|
||||||
// number, and null—along with delimiters [ ] { } of type Delim
|
|
||||||
// to mark the start and end of arrays and objects.
|
|
||||||
// Commas and colons are elided.
|
|
||||||
func (dec *Decoder) Token() (Token, error) {
|
|
||||||
for {
|
|
||||||
c, err := dec.peek()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
switch c {
|
|
||||||
case '[':
|
|
||||||
if !dec.tokenValueAllowed() {
|
|
||||||
return dec.tokenError(c)
|
|
||||||
}
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenStack = append(dec.tokenStack, dec.tokenState)
|
|
||||||
dec.tokenState = tokenArrayStart
|
|
||||||
return Delim('['), nil
|
|
||||||
|
|
||||||
case ']':
|
|
||||||
if dec.tokenState != tokenArrayStart && dec.tokenState != tokenArrayComma {
|
|
||||||
return dec.tokenError(c)
|
|
||||||
}
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
|
|
||||||
dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
|
|
||||||
dec.tokenValueEnd()
|
|
||||||
return Delim(']'), nil
|
|
||||||
|
|
||||||
case '{':
|
|
||||||
if !dec.tokenValueAllowed() {
|
|
||||||
return dec.tokenError(c)
|
|
||||||
}
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenStack = append(dec.tokenStack, dec.tokenState)
|
|
||||||
dec.tokenState = tokenObjectStart
|
|
||||||
return Delim('{'), nil
|
|
||||||
|
|
||||||
case '}':
|
|
||||||
if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma {
|
|
||||||
return dec.tokenError(c)
|
|
||||||
}
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
|
|
||||||
dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
|
|
||||||
dec.tokenValueEnd()
|
|
||||||
return Delim('}'), nil
|
|
||||||
|
|
||||||
case ':':
|
|
||||||
if dec.tokenState != tokenObjectColon {
|
|
||||||
return dec.tokenError(c)
|
|
||||||
}
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenState = tokenObjectValue
|
|
||||||
continue
|
|
||||||
|
|
||||||
case ',':
|
|
||||||
if dec.tokenState == tokenArrayComma {
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenState = tokenArrayValue
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if dec.tokenState == tokenObjectComma {
|
|
||||||
dec.scanp++
|
|
||||||
dec.tokenState = tokenObjectKey
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
return dec.tokenError(c)
|
|
||||||
|
|
||||||
case '"':
|
|
||||||
if dec.tokenState == tokenObjectStart || dec.tokenState == tokenObjectKey {
|
|
||||||
var x string
|
|
||||||
old := dec.tokenState
|
|
||||||
dec.tokenState = tokenTopValue
|
|
||||||
err := dec.Decode(&x)
|
|
||||||
dec.tokenState = old
|
|
||||||
if err != nil {
|
|
||||||
clearOffset(err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
dec.tokenState = tokenObjectColon
|
|
||||||
return x, nil
|
|
||||||
}
|
|
||||||
fallthrough
|
|
||||||
|
|
||||||
default:
|
|
||||||
if !dec.tokenValueAllowed() {
|
|
||||||
return dec.tokenError(c)
|
|
||||||
}
|
|
||||||
var x interface{}
|
|
||||||
if err := dec.Decode(&x); err != nil {
|
|
||||||
clearOffset(err)
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return x, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func clearOffset(err error) {
|
|
||||||
if s, ok := err.(*SyntaxError); ok {
|
|
||||||
s.Offset = 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dec *Decoder) tokenError(c byte) (Token, error) {
|
|
||||||
var context string
|
|
||||||
switch dec.tokenState {
|
|
||||||
case tokenTopValue:
|
|
||||||
context = " looking for beginning of value"
|
|
||||||
case tokenArrayStart, tokenArrayValue, tokenObjectValue:
|
|
||||||
context = " looking for beginning of value"
|
|
||||||
case tokenArrayComma:
|
|
||||||
context = " after array element"
|
|
||||||
case tokenObjectKey:
|
|
||||||
context = " looking for beginning of object key string"
|
|
||||||
case tokenObjectColon:
|
|
||||||
context = " after object key"
|
|
||||||
case tokenObjectComma:
|
|
||||||
context = " after object key:value pair"
|
|
||||||
}
|
|
||||||
return nil, &SyntaxError{"invalid character " + quoteChar(c) + " " + context, 0}
|
|
||||||
}
|
|
||||||
|
|
||||||
// More reports whether there is another element in the
|
|
||||||
// current array or object being parsed.
|
|
||||||
func (dec *Decoder) More() bool {
|
|
||||||
c, err := dec.peek()
|
|
||||||
return err == nil && c != ']' && c != '}'
|
|
||||||
}
|
|
||||||
|
|
||||||
func (dec *Decoder) peek() (byte, error) {
|
|
||||||
var err error
|
|
||||||
for {
|
|
||||||
for i := dec.scanp; i < len(dec.buf); i++ {
|
|
||||||
c := dec.buf[i]
|
|
||||||
if isSpace(c) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
dec.scanp = i
|
|
||||||
return c, nil
|
|
||||||
}
|
|
||||||
// buffer has been scanned, now report any error
|
|
||||||
if err != nil {
|
|
||||||
return 0, err
|
|
||||||
}
|
|
||||||
err = dec.refill()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
TODO
|
|
||||||
|
|
||||||
// EncodeToken writes the given JSON token to the stream.
|
|
||||||
// It returns an error if the delimiters [ ] { } are not properly used.
|
|
||||||
//
|
|
||||||
// EncodeToken does not call Flush, because usually it is part of
|
|
||||||
// a larger operation such as Encode, and those will call Flush when finished.
|
|
||||||
// Callers that create an Encoder and then invoke EncodeToken directly,
|
|
||||||
// without using Encode, need to call Flush when finished to ensure that
|
|
||||||
// the JSON is written to the underlying writer.
|
|
||||||
func (e *Encoder) EncodeToken(t Token) error {
|
|
||||||
...
|
|
||||||
}
|
|
||||||
|
|
||||||
*/
|
|
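Since the deleted file tracks the standard library's encoding/json stream API, its intended usage can be illustrated with the stock package; Decoder.Token, More, Decode and Encoder.Encode all appear in the code above, while the sample input and struct are invented for this sketch:

package main

import (
    "encoding/json"
    "fmt"
    "os"
    "strings"
)

func main() {
    // Decode a JSON array element by element instead of all at once.
    dec := json.NewDecoder(strings.NewReader(`[{"title":"foo"},{"title":"bar"}]`))

    if _, err := dec.Token(); err != nil { // consume the opening '['
        panic(err)
    }
    for dec.More() {
        var item struct {
            Title string `json:"title"`
        }
        if err := dec.Decode(&item); err != nil {
            panic(err)
        }
        fmt.Println(item.Title)
    }
    if _, err := dec.Token(); err != nil { // consume the closing ']'
        panic(err)
    }

    // Encode writes each value followed by a newline.
    enc := json.NewEncoder(os.Stdout)
    if err := enc.Encode(map[string]bool{"done": true}); err != nil {
        panic(err)
    }
}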
vendor/github.com/globalsign/mgo/internal/json/tags.go (generated, vendored), 44 lines deleted
@@ -1,44 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package json

import (
    "strings"
)

// tagOptions is the string following a comma in a struct field's "json"
// tag, or the empty string. It does not include the leading comma.
type tagOptions string

// parseTag splits a struct field's json tag into its name and
// comma-separated options.
func parseTag(tag string) (string, tagOptions) {
    if idx := strings.Index(tag, ","); idx != -1 {
        return tag[:idx], tagOptions(tag[idx+1:])
    }
    return tag, tagOptions("")
}

// Contains reports whether a comma-separated list of options
// contains a particular substr flag. substr must be surrounded by a
// string boundary or commas.
func (o tagOptions) Contains(optionName string) bool {
    if len(o) == 0 {
        return false
    }
    s := string(o)
    for s != "" {
        var next string
        i := strings.Index(s, ",")
        if i >= 0 {
            s, next = s[:i], s[i+1:]
        }
        if s == optionName {
            return true
        }
        s = next
    }
    return false
}
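What parseTag and Contains accomplish for a tag such as json:"title,omitempty" can be sketched with plain strings, since both helpers are unexported; the task struct below is a made-up example:

package main

import (
    "fmt"
    "reflect"
    "strings"
)

type task struct {
    Title string `json:"title,omitempty"`
}

func main() {
    // Grab the raw json tag via reflection, then split it the way
    // parseTag and Contains do.
    tag := reflect.TypeOf(task{}).Field(0).Tag.Get("json") // "title,omitempty"

    name, opts := tag, ""
    if i := strings.Index(tag, ","); i != -1 {
        name, opts = tag[:i], tag[i+1:]
    }

    hasOmitEmpty := false
    for _, o := range strings.Split(opts, ",") {
        if o == "omitempty" {
            hasOmitEmpty = true
        }
    }
    fmt.Println(name, hasOmitEmpty) // title true
}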
vendor/github.com/go-openapi/analysis/.gitignore (generated, vendored), 5 lines deleted
@@ -1,5 +0,0 @@
secrets.yml
coverage.out
coverage.txt
*.cov
.idea
vendor/github.com/go-openapi/analysis/.golangci.yml (generated, vendored), 19 lines deleted
@@ -1,19 +0,0 @@
linters-settings:
  govet:
    check-shadowing: true
  golint:
    min-confidence: 0
  gocyclo:
    min-complexity: 30
  maligned:
    suggest-new: true
  dupl:
    threshold: 100
  goconst:
    min-len: 2
    min-occurrences: 4

linters:
  enable-all: true
  disable:
    - maligned
vendor/github.com/go-openapi/analysis/.travis.yml (generated, vendored), 24 lines deleted
@@ -1,24 +0,0 @@
after_success:
- bash <(curl -s https://codecov.io/bash)
go:
- '1.9'
- 1.10.x
- 1.11.x
install:
- go get -u github.com/axw/gocov/gocov
- go get -u gopkg.in/matm/v1/gocov-html
- go get -u github.com/cee-dub/go-junit-report
- go get -u github.com/docker/go-units
- go get -u github.com/stretchr/testify/assert
- go get -u gopkg.in/yaml.v2
- go get -u github.com/go-openapi/swag
- go get -u github.com/go-openapi/jsonpointer
- go get -u github.com/go-openapi/spec
- go get -u github.com/go-openapi/strfmt
- go get -u github.com/go-openapi/loads/fmts
language: go
notifications:
  slack:
    secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
script:
- hack/coverage
vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md (generated, vendored), 74 lines deleted
@@ -1,74 +0,0 @@
[Entire file deleted. It was the standard Contributor Covenant Code of Conduct, version 1.4, with incident reports directed to ivan+abuse@flanders.co.nz.]
vendor/github.com/go-openapi/analysis/LICENSE (generated, vendored), 202 lines deleted
@@ -1,202 +0,0 @@
[Entire file deleted. It was the unmodified Apache License, Version 2.0 text.]
vendor/github.com/go-openapi/analysis/README.md (generated, vendored), 9 lines deleted
@@ -1,9 +0,0 @@
# OpenAPI initiative analysis [![Build Status](https://travis-ci.org/go-openapi/analysis.svg?branch=master)](https://travis-ci.org/go-openapi/analysis) [![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)

[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE)
[![GoDoc](https://godoc.org/github.com/go-openapi/analysis?status.svg)](http://godoc.org/github.com/go-openapi/analysis)
[![GolangCI](https://golangci.com/badges/github.com/go-openapi/analysis.svg)](https://golangci.com)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/analysis)](https://goreportcard.com/report/github.com/go-openapi/analysis)

A foundational library to analyze an OAI specification document for easier reasoning about the content.
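A rough sketch of how this library is typically wired up: only analysis.New and Spec.AllPaths are visible in the deleted analyzer.go below, while loads.Spec and the swagger.json path are assumptions drawn from the wider go-openapi ecosystem:

package main

import (
    "fmt"
    "log"

    "github.com/go-openapi/analysis"
    "github.com/go-openapi/loads"
)

func main() {
    // Load a swagger 2.0 document from disk (loads.Spec and the file
    // name are assumptions, not shown in this commit).
    doc, err := loads.Spec("./swagger.json")
    if err != nil {
        log.Fatal(err)
    }

    // analysis.New builds the indexed Spec defined in analyzer.go below.
    an := analysis.New(doc.Spec())

    // AllPaths is one of the lookups the analyzed spec exposes.
    for path := range an.AllPaths() {
        fmt.Println(path)
    }
}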
vendor/github.com/go-openapi/analysis/analyzer.go (generated, vendored), 892 lines deleted
@@ -1,892 +0,0 @@
[Deleted file contents: the analysis package's referenceAnalysis and patternAnalysis indexes, the Spec type returned by New(doc *spec.Swagger), its initialize/analyzeOperations/analyzeItems/analyzeOperation/analyzeSchema walkers, and the SecurityRequirementsFor, SecurityDefinitionsFor, ConsumesFor, ProducesFor and paramsAsMap helpers; the listing is cut off partway through paramsAsMap.]
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
panic(fmt.Sprintf("invalid reference: %q", pr.Ref.String()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if objAsParam, ok := obj.(spec.Parameter); ok {
|
|
||||||
pr = objAsParam
|
|
||||||
} else {
|
|
||||||
if callmeOnError != nil {
|
|
||||||
if callmeOnError(param, fmt.Errorf("resolved reference is not a parameter: %q", pr.Ref.String())) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
panic(fmt.Sprintf("resolved reference is not a parameter: %q", pr.Ref.String()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
res[mapKeyFromParam(&pr)] = pr
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParametersFor the specified operation id.
|
|
||||||
//
|
|
||||||
// Assumes parameters properly resolve references if any and that
|
|
||||||
// such references actually resolve to a parameter object.
|
|
||||||
// Otherwise, panics.
|
|
||||||
func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
|
|
||||||
return s.SafeParametersFor(operationID, nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
// SafeParametersFor the specified operation id.
|
|
||||||
//
|
|
||||||
// Does not assume parameters properly resolve references or that
|
|
||||||
// such references actually resolve to a parameter object.
|
|
||||||
//
|
|
||||||
// Upon error, invoke a ErrorOnParamFunc callback with the erroneous
|
|
||||||
// parameters. If the callback is set to nil, panics upon errors.
|
|
||||||
func (s *Spec) SafeParametersFor(operationID string, callmeOnError ErrorOnParamFunc) []spec.Parameter {
|
|
||||||
gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
|
|
||||||
bag := make(map[string]spec.Parameter)
|
|
||||||
s.paramsAsMap(pi.Parameters, bag, callmeOnError)
|
|
||||||
s.paramsAsMap(op.Parameters, bag, callmeOnError)
|
|
||||||
|
|
||||||
var res []spec.Parameter
|
|
||||||
for _, v := range bag {
|
|
||||||
res = append(res, v)
|
|
||||||
}
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
for _, pi := range s.spec.Paths.Paths {
|
|
||||||
if pi.Get != nil && pi.Get.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Get)
|
|
||||||
}
|
|
||||||
if pi.Head != nil && pi.Head.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Head)
|
|
||||||
}
|
|
||||||
if pi.Options != nil && pi.Options.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Options)
|
|
||||||
}
|
|
||||||
if pi.Post != nil && pi.Post.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Post)
|
|
||||||
}
|
|
||||||
if pi.Patch != nil && pi.Patch.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Patch)
|
|
||||||
}
|
|
||||||
if pi.Put != nil && pi.Put.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Put)
|
|
||||||
}
|
|
||||||
if pi.Delete != nil && pi.Delete.ID == operationID {
|
|
||||||
return gatherParams(&pi, pi.Delete)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
|
|
||||||
// apply for the method and path.
|
|
||||||
//
|
|
||||||
// Assumes parameters properly resolve references if any and that
|
|
||||||
// such references actually resolve to a parameter object.
|
|
||||||
// Otherwise, panics.
|
|
||||||
func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
|
|
||||||
return s.SafeParamsFor(method, path, nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
// SafeParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
|
|
||||||
// apply for the method and path.
|
|
||||||
//
|
|
||||||
// Does not assume parameters properly resolve references or that
|
|
||||||
// such references actually resolve to a parameter object.
|
|
||||||
//
|
|
||||||
// Upon error, invoke a ErrorOnParamFunc callback with the erroneous
|
|
||||||
// parameters. If the callback is set to nil, panics upon errors.
|
|
||||||
func (s *Spec) SafeParamsFor(method, path string, callmeOnError ErrorOnParamFunc) map[string]spec.Parameter {
|
|
||||||
res := make(map[string]spec.Parameter)
|
|
||||||
if pi, ok := s.spec.Paths.Paths[path]; ok {
|
|
||||||
s.paramsAsMap(pi.Parameters, res, callmeOnError)
|
|
||||||
s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res, callmeOnError)
|
|
||||||
}
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|
||||||
// OperationForName gets the operation for the given id
|
|
||||||
func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
|
|
||||||
for method, pathItem := range s.operations {
|
|
||||||
for path, op := range pathItem {
|
|
||||||
if operationID == op.ID {
|
|
||||||
return method, path, op, true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return "", "", nil, false
|
|
||||||
}
|
|
||||||
|
|
||||||
// OperationFor the given method and path
|
|
||||||
func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
|
|
||||||
if mp, ok := s.operations[strings.ToUpper(method)]; ok {
|
|
||||||
op, fn := mp[path]
|
|
||||||
return op, fn
|
|
||||||
}
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Operations gathers all the operations specified in the spec document
|
|
||||||
func (s *Spec) Operations() map[string]map[string]*spec.Operation {
|
|
||||||
return s.operations
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
|
|
||||||
if len(mp) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
result := make([]string, 0, len(mp))
|
|
||||||
for k := range mp {
|
|
||||||
result = append(result, k)
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllPaths returns all the paths in the swagger spec
|
|
||||||
func (s *Spec) AllPaths() map[string]spec.PathItem {
|
|
||||||
if s.spec == nil || s.spec.Paths == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return s.spec.Paths.Paths
|
|
||||||
}
|
|
||||||
|
|
||||||
// OperationIDs gets all the operation ids based on method an dpath
|
|
||||||
func (s *Spec) OperationIDs() []string {
|
|
||||||
if len(s.operations) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
result := make([]string, 0, len(s.operations))
|
|
||||||
for method, v := range s.operations {
|
|
||||||
for p, o := range v {
|
|
||||||
if o.ID != "" {
|
|
||||||
result = append(result, o.ID)
|
|
||||||
} else {
|
|
||||||
result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// OperationMethodPaths gets all the operation ids based on method an dpath
|
|
||||||
func (s *Spec) OperationMethodPaths() []string {
|
|
||||||
if len(s.operations) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
result := make([]string, 0, len(s.operations))
|
|
||||||
for method, v := range s.operations {
|
|
||||||
for p := range v {
|
|
||||||
result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequiredConsumes gets all the distinct consumes that are specified in the specification document
|
|
||||||
func (s *Spec) RequiredConsumes() []string {
|
|
||||||
return s.structMapKeys(s.consumes)
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequiredProduces gets all the distinct produces that are specified in the specification document
|
|
||||||
func (s *Spec) RequiredProduces() []string {
|
|
||||||
return s.structMapKeys(s.produces)
|
|
||||||
}
|
|
||||||
|
|
||||||
// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
|
|
||||||
func (s *Spec) RequiredSecuritySchemes() []string {
|
|
||||||
return s.structMapKeys(s.authSchemes)
|
|
||||||
}
|
|
||||||
|
|
||||||
// SchemaRef is a reference to a schema
|
|
||||||
type SchemaRef struct {
|
|
||||||
Name string
|
|
||||||
Ref spec.Ref
|
|
||||||
Schema *spec.Schema
|
|
||||||
TopLevel bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// SchemasWithAllOf returns schema references to all schemas that are defined
|
|
||||||
// with an allOf key
|
|
||||||
func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
|
|
||||||
for _, v := range s.allOfs {
|
|
||||||
result = append(result, v)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllDefinitions returns schema references for all the definitions that were discovered
|
|
||||||
func (s *Spec) AllDefinitions() (result []SchemaRef) {
|
|
||||||
for _, v := range s.allSchemas {
|
|
||||||
result = append(result, v)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllDefinitionReferences returns json refs for all the discovered schemas
|
|
||||||
func (s *Spec) AllDefinitionReferences() (result []string) {
|
|
||||||
for _, v := range s.references.schemas {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllParameterReferences returns json refs for all the discovered parameters
|
|
||||||
func (s *Spec) AllParameterReferences() (result []string) {
|
|
||||||
for _, v := range s.references.parameters {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllResponseReferences returns json refs for all the discovered responses
|
|
||||||
func (s *Spec) AllResponseReferences() (result []string) {
|
|
||||||
for _, v := range s.references.responses {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllPathItemReferences returns the references for all the items
|
|
||||||
func (s *Spec) AllPathItemReferences() (result []string) {
|
|
||||||
for _, v := range s.references.pathItems {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllItemsReferences returns the references for all the items in simple schemas (parameters or headers).
|
|
||||||
//
|
|
||||||
// NOTE: since Swagger 2.0 forbids $ref in simple params, this should always yield an empty slice for a valid
|
|
||||||
// Swagger 2.0 spec.
|
|
||||||
func (s *Spec) AllItemsReferences() (result []string) {
|
|
||||||
for _, v := range s.references.items {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllReferences returns all the references found in the document, with possible duplicates
|
|
||||||
func (s *Spec) AllReferences() (result []string) {
|
|
||||||
for _, v := range s.references.allRefs {
|
|
||||||
result = append(result, v.String())
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllRefs returns all the unique references found in the document
|
|
||||||
func (s *Spec) AllRefs() (result []spec.Ref) {
|
|
||||||
set := make(map[string]struct{})
|
|
||||||
for _, v := range s.references.allRefs {
|
|
||||||
a := v.String()
|
|
||||||
if a == "" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, ok := set[a]; !ok {
|
|
||||||
set[a] = struct{}{}
|
|
||||||
result = append(result, v)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func cloneStringMap(source map[string]string) map[string]string {
|
|
||||||
res := make(map[string]string, len(source))
|
|
||||||
for k, v := range source {
|
|
||||||
res[k] = v
|
|
||||||
}
|
|
||||||
return res
|
|
||||||
}
|
|
||||||
|
|
||||||
// ParameterPatterns returns all the patterns found in parameters
|
|
||||||
// the map is cloned to avoid accidental changes
|
|
||||||
func (s *Spec) ParameterPatterns() map[string]string {
|
|
||||||
return cloneStringMap(s.patterns.parameters)
|
|
||||||
}
|
|
||||||
|
|
||||||
// HeaderPatterns returns all the patterns found in response headers
|
|
||||||
// the map is cloned to avoid accidental changes
|
|
||||||
func (s *Spec) HeaderPatterns() map[string]string {
|
|
||||||
return cloneStringMap(s.patterns.headers)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ItemsPatterns returns all the patterns found in simple array items
|
|
||||||
// the map is cloned to avoid accidental changes
|
|
||||||
func (s *Spec) ItemsPatterns() map[string]string {
|
|
||||||
return cloneStringMap(s.patterns.items)
|
|
||||||
}
|
|
||||||
|
|
||||||
// SchemaPatterns returns all the patterns found in schemas
|
|
||||||
// the map is cloned to avoid accidental changes
|
|
||||||
func (s *Spec) SchemaPatterns() map[string]string {
|
|
||||||
return cloneStringMap(s.patterns.schemas)
|
|
||||||
}
|
|
||||||
|
|
||||||
// AllPatterns returns all the patterns found in the spec
|
|
||||||
// the map is cloned to avoid accidental changes
|
|
||||||
func (s *Spec) AllPatterns() map[string]string {
|
|
||||||
return cloneStringMap(s.patterns.allPatterns)
|
|
||||||
}
|
|
47
vendor/github.com/go-openapi/analysis/debug.go
generated
vendored
@@ -1,47 +0,0 @@
43
vendor/github.com/go-openapi/analysis/doc.go
generated
vendored
@@ -1,43 +0,0 @@
76
vendor/github.com/go-openapi/analysis/fixer.go
generated
vendored
@@ -1,76 +0,0 @@
1500
vendor/github.com/go-openapi/analysis/flatten.go
generated
vendored
File diff suppressed because it is too large
10
vendor/github.com/go-openapi/analysis/go.mod
generated
vendored
@@ -1,10 +0,0 @@
module github.com/go-openapi/analysis

require (
	github.com/go-openapi/jsonpointer v0.17.0
	github.com/go-openapi/loads v0.17.0
	github.com/go-openapi/spec v0.17.0
	github.com/go-openapi/strfmt v0.17.0
	github.com/go-openapi/swag v0.17.0
	github.com/stretchr/testify v1.2.2
)
37
vendor/github.com/go-openapi/analysis/go.sum
generated
vendored
@@ -1,37 +0,0 @@
29
vendor/github.com/go-openapi/analysis/internal/post_go18.go
generated
vendored
@@ -1,29 +0,0 @@
29
vendor/github.com/go-openapi/analysis/internal/pre_go18.go
generated
vendored
@@ -1,29 +0,0 @@
334
vendor/github.com/go-openapi/analysis/mixin.go
generated
vendored
@@ -1,334 +0,0 @@
234
vendor/github.com/go-openapi/analysis/schema.go
generated
vendored
@@ -1,234 +0,0 @@
2
vendor/github.com/go-openapi/errors/.gitignore
generated
vendored
@@ -1,2 +0,0 @@
secrets.yml
coverage.out
14
vendor/github.com/go-openapi/errors/.travis.yml
generated
vendored
@@ -1,14 +0,0 @@
74
vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md
generated
vendored
@@ -1,74 +0,0 @@
202  vendor/github.com/go-openapi/errors/LICENSE  (generated, vendored)
@@ -1,202 +0,0 @@
(entire vendored file deleted; it contained the unmodified Apache License, Version 2.0 text)
8  vendor/github.com/go-openapi/errors/README.md  (generated, vendored)
@@ -1,8 +0,0 @@
# OpenAPI errors [![Build Status](https://travis-ci.org/go-openapi/errors.svg?branch=master)](https://travis-ci.org/go-openapi/errors) [![codecov](https://codecov.io/gh/go-openapi/errors/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/errors) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)

[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/errors/master/LICENSE)
[![GoDoc](https://godoc.org/github.com/go-openapi/errors?status.svg)](http://godoc.org/github.com/go-openapi/errors)
[![GolangCI](https://golangci.com/badges/github.com/go-openapi/errors.svg)](https://golangci.com)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/errors)](https://goreportcard.com/report/github.com/go-openapi/errors)

Shared errors and error interface used throughout the various libraries found in the go-openapi toolkit.
166  vendor/github.com/go-openapi/errors/api.go  (generated, vendored)
@@ -1,166 +0,0 @@
(entire vendored file deleted; it defined the Error interface, the apiError and MethodNotAllowedError types, the constructors New, NotFound, NotImplemented and MethodNotAllowed, and the ServeError JSON error handler with its asHTTPCode helper and DefaultHTTPCode of 422)
20  vendor/github.com/go-openapi/errors/auth.go  (generated, vendored)
@@ -1,20 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package errors

// Unauthenticated returns an unauthenticated error
func Unauthenticated(scheme string) Error {
	return New(401, "unauthenticated for %s", scheme)
}
28  vendor/github.com/go-openapi/errors/doc.go  (generated, vendored)
@@ -1,28 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/*

Package errors provides an Error interface and several concrete types
implementing this interface to manage API errors and JSON-schema validation
errors.

A middleware handler ServeError() is provided to serve the errors types
it defines.

It is used throughout the various go-openapi toolkit libraries
(https://github.com/go-openapi).

*/
package errors
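Editor's aside (illustrative, not part of this diff): the doc.go comment above describes the Error interface and the ServeError() handler that this commit drops from vendor/. Below is a minimal sketch of how that API is typically wired into a net/http handler, based only on the functions visible in these vendored files; the route, field names, messages and port are assumptions made up for the example.

package main

import (
	"net/http"

	"github.com/go-openapi/errors"
)

func main() {
	http.HandleFunc("/items", func(w http.ResponseWriter, r *http.Request) {
		// Bundle individual validation failures; CompositeValidationError
		// wraps them under the composite error code 422.
		failures := errors.CompositeValidationError(
			errors.Required("name", "body"),                     // "name in body is required"
			errors.New(400, "the request body could not be parsed"), // free-form API error
		)
		// ServeError maps the error's Code() to an HTTP status (falling back
		// to 422 for codes >= 600) and writes the error as JSON.
		errors.ServeError(w, r, failures)
	})
	http.ListenAndServe(":8080", nil) // port chosen arbitrarily for this sketch
}

ServeError flattens the composite error and serves its first element, so this example would respond with the "name in body is required" validation error as JSON and status 422.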
7  vendor/github.com/go-openapi/errors/go.mod  (generated, vendored)
@@ -1,7 +0,0 @@
module github.com/go-openapi/errors

require (
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/stretchr/testify v1.2.2
)
5  vendor/github.com/go-openapi/errors/go.sum  (generated, vendored)
@@ -1,5 +0,0 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
85  vendor/github.com/go-openapi/errors/headers.go  (generated, vendored)
@@ -1,85 +0,0 @@
(entire vendored file deleted; it defined the Validation error type and the InvalidContentType and InvalidResponseFormat header-validation errors)
51  vendor/github.com/go-openapi/errors/middleware.go  (generated, vendored)
@@ -1,51 +0,0 @@
(entire vendored file deleted; it defined the APIVerificationFailed error used to report mismatches between API registrations and the spec)
59  vendor/github.com/go-openapi/errors/parsing.go  (generated, vendored)
@@ -1,59 +0,0 @@
(entire vendored file deleted; it defined the ParseError type and the NewParseError constructor)
562  vendor/github.com/go-openapi/errors/schema.go  (generated, vendored)
@@ -1,562 +0,0 @@
(entire vendored file deleted; it defined the JSON-schema validation error codes and the Validation/CompositeError constructors such as CompositeValidationError, Required, InvalidType, EnumFail, TooLong, TooShort, FailedPattern, the ExceedsMaximum*/ExceedsMinimum* family, NotMultipleOf, DuplicateItems, TooManyItems, TooFewItems and related property-count helpers)
1  vendor/github.com/go-openapi/inflect/.hgignore  (generated, vendored)
@@ -1 +0,0 @@
swp$
7  vendor/github.com/go-openapi/inflect/LICENCE  (generated, vendored)
@@ -1,7 +0,0 @@
Copyright (c) 2011 Chris Farmiloe

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
168
vendor/github.com/go-openapi/inflect/README
generated
vendored
168
vendor/github.com/go-openapi/inflect/README
generated
vendored
|
@ -1,168 +0,0 @@
|
(Deleted in full: the vendored API documentation for package inflect — installation via `go get bitbucket.org/pkg/inflect`, the exported functions AddAcronym, AddHuman, AddIrregular, AddPlural, AddSingular, AddUncountable, Asciify, Camelize, CamelizeDownFirst, Capitalize, Dasherize, ForeignKey, ForeignKeyCondensed, Humanize, Ordinalize, Parameterize, ParameterizeJoin, Pluralize, Singularize, Tableize, Titleize, Typeify, Uncountables and Underscore, and the Rule and Ruleset types whose Add* methods extend the default English pluralization rules.)
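For reference, a minimal sketch of how this (now removed) vendored package is typically consumed. The calls and the expected results follow the function docs summarized above; the import path assumes the vendored module path shown in this diff.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/inflect" // vendored module path; upstream is bitbucket.org/pkg/inflect
)

func main() {
	// Pluralization and singularization use the package-level default ruleset.
	fmt.Println(inflect.Pluralize("status"))    // "statuses"
	fmt.Println(inflect.Singularize("indices")) // "index"

	// Case conversions documented in the README above.
	fmt.Println(inflect.Camelize("dino_party"))  // "DinoParty"
	fmt.Println(inflect.Underscore("BigBen"))    // "big_ben"
	fmt.Println(inflect.Tableize("SuperPerson")) // "super_people"

	// Ordinals: "1031" -> "1031st".
	fmt.Println(inflect.Ordinalize("1031"))
}
```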
1 vendor/github.com/go-openapi/inflect/go.mod (generated, vendored)
@@ -1 +0,0 @@
module github.com/go-openapi/inflect
713 vendor/github.com/go-openapi/inflect/inflect.go (generated, vendored)
@@ -1,713 +0,0 @@
(Deleted in full: the vendored implementation of package inflect — the Rule and Ruleset types, NewRuleset and NewDefaultRuleset with the built-in English pluralization, singularization, irregular and uncountable rules, the Ruleset methods Pluralize, Singularize, Capitalize, Camelize, CamelizeDownFirst, Titleize, Underscore, Humanize, ForeignKey, ForeignKeyCondensed, Tableize, Parameterize, ParameterizeJoin, Asciify, Typeify, Dasherize and Ordinalize, the package-level wrappers around a default ruleset, and small string helpers such as reverse, splitAtCaseChange and replaceLast.)
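As an illustration of the Ruleset API that this deletion removes, here is a small hedged sketch of extending the default ruleset. The method names and the acronym behavior follow the deleted source above; the example words are arbitrary.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/inflect"
)

func main() {
	// Start from the default English rules and extend them with the
	// Ruleset methods shown in the deleted file.
	rs := inflect.NewDefaultRuleset()
	rs.AddIrregular("octopus", "octopuses") // overrides the default "octopi" mapping
	rs.AddUncountable("deer")               // same singular and plural form
	rs.AddAcronym("HTML")                   // keeps "HTML" from becoming "h_t_m_l"

	fmt.Println(rs.Pluralize("octopus"))     // "octopuses"
	fmt.Println(rs.Pluralize("deer"))        // "deer"
	fmt.Println(rs.Underscore("HTMLParser")) // "html_parser"
}
```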
1 vendor/github.com/go-openapi/loads/.drone.sec (generated, vendored)
@@ -1 +0,0 @@
(Deleted: a single line containing the encrypted Drone CI secrets blob.)
39 vendor/github.com/go-openapi/loads/.drone.yml (generated, vendored)
@@ -1,39 +0,0 @@
clone:
  path: github.com/go-openapi/loads

matrix:
  GO_VERSION:
    - "1.6"

build:
  integration:
    image: golang:$$GO_VERSION
    pull: true
    environment:
      GOCOVMODE: "count"
    commands:
      - go get -u github.com/axw/gocov/gocov
      - go get -u gopkg.in/matm/v1/gocov-html
      - go get -u github.com/cee-dub/go-junit-report
      - go get -u github.com/stretchr/testify/assert
      - go get -u gopkg.in/yaml.v2
      - go get -u github.com/go-openapi/swag
      - go get -u github.com/go-openapi/analysis
      - go get -u github.com/go-openapi/spec
      - ./hack/build-drone.sh

notify:
  slack:
    channel: bots
    webhook_url: $$SLACK_URL
    username: drone

publish:
  coverage:
    server: https://coverage.vmware.run
    token: $$GITHUB_TOKEN
    # threshold: 70
    # must_increase: true
    when:
      matrix:
        GO_VERSION: "1.6"
26 vendor/github.com/go-openapi/loads/.editorconfig (generated, vendored)
@@ -1,26 +0,0 @@
# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true

# Set default charset
[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
charset = utf-8

# Tab indentation (no size specified)
[*.go]
indent_style = tab

[*.md]
trim_trailing_whitespace = false

# Matches the exact files either package.json or .travis.yml
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2
4 vendor/github.com/go-openapi/loads/.gitignore (generated, vendored)
@@ -1,4 +0,0 @@
secrets.yml
coverage.out
profile.cov
profile.out
18 vendor/github.com/go-openapi/loads/.travis.yml (generated, vendored)
@@ -1,18 +0,0 @@
after_success:
- bash <(curl -s https://codecov.io/bash)
go:
- '1.9'
- 1.10.x
- 1.11.x
install:
- go get -u github.com/stretchr/testify
- go get -u github.com/go-openapi/analysis
- go get -u github.com/go-openapi/spec
- go get -u github.com/go-openapi/swag
- go get -u gopkg.in/yaml.v2
language: go
notifications:
  slack:
    secure: OxkPwVp35qBTUilgWC8xykSj+sGMcj0h8IIOKD+Rflx2schZVlFfdYdyVBM+s9OqeOfvtuvnR9v1Ye2rPKAvcjWdC4LpRGUsgmItZaI6Um8Aj6+K9udCw5qrtZVfOVmRu8LieH//XznWWKdOultUuniW0MLqw5+II87Gd00RWbCGi0hk0PykHe7uK+PDA2BEbqyZ2WKKYCvfB3j+0nrFOHScXqnh0V05l2E83J4+Sgy1fsPy+1WdX58ZlNBG333ibaC1FS79XvKSmTgKRkx3+YBo97u6ZtUmJa5WZjf2OdLG3KIckGWAv6R5xgxeU31N0Ng8L332w/Edpp2O/M2bZwdnKJ8hJQikXIAQbICbr+lTDzsoNzMdEIYcHpJ5hjPbiUl3Bmd+Jnsjf5McgAZDiWIfpCKZ29tPCEkVwRsOCqkyPRMNMzHHmoja495P5jR+ODS7+J8RFg5xgcnOgpP9D4Wlhztlf5WyZMpkLxTUD+bZq2SRf50HfHFXTkfq22zPl3d1eq0yrLwh/Z/fWKkfb6SyysROL8y6s8u3dpFX1YHSg0BR6i913h4aoZw9B2BG27cafLLTwKYsp2dFo1PWl4O6u9giFJIeqwloZHLKKrwh0cBFhB7RH0I58asxkZpCH6uWjJierahmHe7iS+E6i+9oCHkOZ59hmCYNimIs3hM=
script:
- ./hack/coverage
74 vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md (generated, vendored)
@@ -1,74 +0,0 @@
(Deleted in full: the standard Contributor Covenant Code of Conduct, version 1.4, with incident reports directed to ivan+abuse@flanders.co.nz.)
202 vendor/github.com/go-openapi/loads/LICENSE (generated, vendored)
@@ -1,202 +0,0 @@
(Deleted in full: the unmodified text of the Apache License, Version 2.0, January 2004, http://www.apache.org/licenses/.)
5 vendor/github.com/go-openapi/loads/README.md (generated, vendored)
@@ -1,5 +0,0 @@
# Loads OAI specs [![Build Status](https://travis-ci.org/go-openapi/loads.svg?branch=master)](https://travis-ci.org/go-openapi/loads) [![codecov](https://codecov.io/gh/go-openapi/loads/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/loads) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)

[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/loads?status.svg)](http://godoc.org/github.com/go-openapi/loads)

Loading of OAI specification documents from local or remote locations.
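A brief, hedged sketch of how the go-openapi/loads package described above is commonly used. The `Spec` entry point and the `Document.Spec()` accessor are not shown in this diff and are assumed here from the package's usual API; `./swagger.yml` is a placeholder path.

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/loads"
)

func main() {
	// Load a Swagger/OAI document from a local file; a URL is accepted as well.
	doc, err := loads.Spec("./swagger.yml") // assumed entry point, not confirmed by this diff
	if err != nil {
		log.Fatal(err)
	}

	swagger := doc.Spec() // *spec.Swagger from github.com/go-openapi/spec
	if swagger.Info != nil {
		fmt.Println("loaded spec:", swagger.Info.Title)
	}
	fmt.Println("base path:", swagger.BasePath)
}
```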
30 vendor/github.com/go-openapi/loads/fmts/yaml.go (generated, vendored)
@@ -1,30 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package fmts

import "github.com/go-openapi/swag"

var (
	// YAMLMatcher matches yaml
	YAMLMatcher = swag.YAMLMatcher
	// YAMLToJSON converts YAML unmarshaled data into json compatible data
	YAMLToJSON = swag.YAMLToJSON
	// BytesToYAMLDoc converts raw bytes to a map[string]interface{}
	BytesToYAMLDoc = swag.BytesToYAMLDoc
	// YAMLDoc loads a yaml document from either http or a file and converts it to json
	YAMLDoc = swag.YAMLDoc
	// YAMLData loads a yaml document from either http or a file
	YAMLData = swag.YAMLData
)
23 vendor/github.com/go-openapi/loads/go.mod (generated, vendored)
@@ -1,23 +0,0 @@
module github.com/go-openapi/loads

require (
	github.com/PuerkitoBio/purell v1.1.0 // indirect
	github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
	github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/globalsign/mgo v0.0.0-20180905125535-1ca0a4f7cbcb // indirect
	github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277
	github.com/go-openapi/errors v0.17.0 // indirect
	github.com/go-openapi/jsonpointer v0.17.0 // indirect
	github.com/go-openapi/jsonreference v0.17.0 // indirect
	github.com/go-openapi/spec v0.17.0
	github.com/go-openapi/strfmt v0.17.0 // indirect
	github.com/go-openapi/swag v0.17.0
	github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 // indirect
	github.com/mitchellh/mapstructure v1.1.2 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/stretchr/testify v1.2.2
	golang.org/x/net v0.0.0-20181005035420-146acd28ed58 // indirect
	golang.org/x/text v0.3.0 // indirect
	gopkg.in/yaml.v2 v2.2.1
)
36 vendor/github.com/go-openapi/loads/go.sum (generated, vendored)
@@ -1,36 +0,0 @@
(Deleted in full: 36 module checksum entries matching the dependencies listed in go.mod above.)
279
vendor/github.com/go-openapi/loads/spec.go
generated
vendored
279
vendor/github.com/go-openapi/loads/spec.go
generated
vendored
|
@ -1,279 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package loads
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
|
|
||||||
"github.com/go-openapi/analysis"
|
|
||||||
"github.com/go-openapi/spec"
|
|
||||||
"github.com/go-openapi/swag"
|
|
||||||
)
|
|
||||||
|
|
||||||
// JSONDoc loads a json document from either a file or a remote url
|
|
||||||
func JSONDoc(path string) (json.RawMessage, error) {
|
|
||||||
data, err := swag.LoadFromFileOrHTTP(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return json.RawMessage(data), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// DocLoader represents a doc loader type
|
|
||||||
type DocLoader func(string) (json.RawMessage, error)
|
|
||||||
|
|
||||||
// DocMatcher represents a predicate to check if a loader matches
|
|
||||||
type DocMatcher func(string) bool
|
|
||||||
|
|
||||||
var (
|
|
||||||
loaders *loader
|
|
||||||
defaultLoader *loader
|
|
||||||
)
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
defaultLoader = &loader{Match: func(_ string) bool { return true }, Fn: JSONDoc}
|
|
||||||
loaders = defaultLoader
|
|
||||||
spec.PathLoader = loaders.Fn
|
|
||||||
AddLoader(swag.YAMLMatcher, swag.YAMLDoc)
|
|
||||||
}
|
|
||||||
|
|
||||||
// AddLoader for a document
|
|
||||||
func AddLoader(predicate DocMatcher, load DocLoader) {
|
|
||||||
prev := loaders
|
|
||||||
loaders = &loader{
|
|
||||||
Match: predicate,
|
|
||||||
Fn: load,
|
|
||||||
Next: prev,
|
|
||||||
}
|
|
||||||
spec.PathLoader = loaders.Fn
|
|
||||||
}
|
|
||||||
|
|
||||||
type loader struct {
|
|
||||||
Fn DocLoader
|
|
||||||
Match DocMatcher
|
|
||||||
Next *loader
|
|
||||||
}
|
|
||||||
|
|
||||||
// JSONSpec loads a spec from a json document
|
|
||||||
func JSONSpec(path string) (*Document, error) {
|
|
||||||
data, err := JSONDoc(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// convert to json
|
|
||||||
return Analyzed(json.RawMessage(data), "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Document represents a swagger spec document
|
|
||||||
type Document struct {
|
|
||||||
// specAnalyzer
|
|
||||||
Analyzer *analysis.Spec
|
|
||||||
spec *spec.Swagger
|
|
||||||
specFilePath string
|
|
||||||
origSpec *spec.Swagger
|
|
||||||
schema *spec.Schema
|
|
||||||
raw json.RawMessage
|
|
||||||
}
|
|
||||||
|
|
||||||
// Embedded returns a Document based on embedded specs. No analysis is required
|
|
||||||
func Embedded(orig, flat json.RawMessage) (*Document, error) {
|
|
||||||
var origSpec, flatSpec spec.Swagger
|
|
||||||
if err := json.Unmarshal(orig, &origSpec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if err := json.Unmarshal(flat, &flatSpec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &Document{
|
|
||||||
raw: orig,
|
|
||||||
origSpec: &origSpec,
|
|
||||||
spec: &flatSpec,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Spec loads a new spec document
|
|
||||||
func Spec(path string) (*Document, error) {
|
|
||||||
specURL, err := url.Parse(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
var lastErr error
|
|
||||||
for l := loaders.Next; l != nil; l = l.Next {
|
|
||||||
if loaders.Match(specURL.Path) {
|
|
||||||
b, err2 := loaders.Fn(path)
|
|
||||||
if err2 != nil {
|
|
||||||
lastErr = err2
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
doc, err := Analyzed(b, "")
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if doc != nil {
|
|
||||||
doc.specFilePath = path
|
|
||||||
}
|
|
||||||
return doc, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if lastErr != nil {
|
|
||||||
return nil, lastErr
|
|
||||||
}
|
|
||||||
b, err := defaultLoader.Fn(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
document, err := Analyzed(b, "")
|
|
||||||
if document != nil {
|
|
||||||
document.specFilePath = path
|
|
||||||
}
|
|
||||||
|
|
||||||
return document, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// Analyzed creates a new analyzed spec document
|
|
||||||
func Analyzed(data json.RawMessage, version string) (*Document, error) {
|
|
||||||
if version == "" {
|
|
||||||
version = "2.0"
|
|
||||||
}
|
|
||||||
if version != "2.0" {
|
|
||||||
return nil, fmt.Errorf("spec version %q is not supported", version)
|
|
||||||
}
|
|
||||||
|
|
||||||
raw := data
|
|
||||||
trimmed := bytes.TrimSpace(data)
|
|
||||||
if len(trimmed) > 0 {
|
|
||||||
if trimmed[0] != '{' && trimmed[0] != '[' {
|
|
||||||
yml, err := swag.BytesToYAMLDoc(trimmed)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("analyzed: %v", err)
|
|
||||||
}
|
|
||||||
d, err := swag.YAMLToJSON(yml)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("analyzed: %v", err)
|
|
||||||
}
|
|
||||||
raw = d
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
swspec := new(spec.Swagger)
|
|
||||||
if err := json.Unmarshal(raw, swspec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
origsqspec := new(spec.Swagger)
|
|
||||||
if err := json.Unmarshal(raw, origsqspec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
d := &Document{
|
|
||||||
Analyzer: analysis.New(swspec),
|
|
||||||
schema: spec.MustLoadSwagger20Schema(),
|
|
||||||
spec: swspec,
|
|
||||||
raw: raw,
|
|
||||||
origSpec: origsqspec,
|
|
||||||
}
|
|
||||||
return d, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Expanded expands the ref fields in the spec document and returns a new spec document
|
|
||||||
func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) {
|
|
||||||
swspec := new(spec.Swagger)
|
|
||||||
if err := json.Unmarshal(d.raw, swspec); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
var expandOptions *spec.ExpandOptions
|
|
||||||
if len(options) > 0 {
|
|
||||||
expandOptions = options[0]
|
|
||||||
} else {
|
|
||||||
expandOptions = &spec.ExpandOptions{
|
|
||||||
RelativeBase: d.specFilePath,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := spec.ExpandSpec(swspec, expandOptions); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
dd := &Document{
|
|
||||||
Analyzer: analysis.New(swspec),
|
|
||||||
spec: swspec,
|
|
||||||
specFilePath: d.specFilePath,
|
|
||||||
schema: spec.MustLoadSwagger20Schema(),
|
|
||||||
raw: d.raw,
|
|
||||||
origSpec: d.origSpec,
|
|
||||||
}
|
|
||||||
return dd, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// BasePath the base path for this spec
|
|
||||||
func (d *Document) BasePath() string {
|
|
||||||
return d.spec.BasePath
|
|
||||||
}
|
|
||||||
|
|
||||||
// Version returns the version of this spec
|
|
||||||
func (d *Document) Version() string {
|
|
||||||
return d.spec.Swagger
|
|
||||||
}
|
|
||||||
|
|
||||||
// Schema returns the swagger 2.0 schema
|
|
||||||
func (d *Document) Schema() *spec.Schema {
|
|
||||||
return d.schema
|
|
||||||
}
|
|
||||||
|
|
||||||
// Spec returns the swagger spec object model
|
|
||||||
func (d *Document) Spec() *spec.Swagger {
|
|
||||||
return d.spec
|
|
||||||
}
|
|
||||||
|
|
||||||
// Host returns the host for the API
|
|
||||||
func (d *Document) Host() string {
|
|
||||||
return d.spec.Host
|
|
||||||
}
|
|
||||||
|
|
||||||
// Raw returns the raw swagger spec as json bytes
|
|
||||||
func (d *Document) Raw() json.RawMessage {
|
|
||||||
return d.raw
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Document) OrigSpec() *spec.Swagger {
|
|
||||||
return d.origSpec
|
|
||||||
}
|
|
||||||
|
|
||||||
// ResetDefinitions gives a shallow copy with the models reset
|
|
||||||
func (d *Document) ResetDefinitions() *Document {
|
|
||||||
defs := make(map[string]spec.Schema, len(d.origSpec.Definitions))
|
|
||||||
for k, v := range d.origSpec.Definitions {
|
|
||||||
defs[k] = v
|
|
||||||
}
|
|
||||||
|
|
||||||
d.spec.Definitions = defs
|
|
||||||
return d
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pristine creates a new pristine document instance based on the input data
|
|
||||||
func (d *Document) Pristine() *Document {
|
|
||||||
dd, _ := Analyzed(d.Raw(), d.Version())
|
|
||||||
return dd
|
|
||||||
}
|
|
||||||
|
|
||||||
// SpecFilePath returns the file path of the spec if one is defined
|
|
||||||
func (d *Document) SpecFilePath() string {
|
|
||||||
return d.specFilePath
|
|
||||||
}
|
|
26
vendor/github.com/go-openapi/runtime/.editorconfig
generated
vendored
26
vendor/github.com/go-openapi/runtime/.editorconfig
generated
vendored
|
@ -1,26 +0,0 @@
|
||||||
# top-most EditorConfig file
|
|
||||||
root = true
|
|
||||||
|
|
||||||
# Unix-style newlines with a newline ending every file
|
|
||||||
[*]
|
|
||||||
end_of_line = lf
|
|
||||||
insert_final_newline = true
|
|
||||||
indent_style = space
|
|
||||||
indent_size = 2
|
|
||||||
trim_trailing_whitespace = true
|
|
||||||
|
|
||||||
# Set default charset
|
|
||||||
[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
|
|
||||||
charset = utf-8
|
|
||||||
|
|
||||||
# Tab indentation (no size specified)
|
|
||||||
[*.go]
|
|
||||||
indent_style = tab
|
|
||||||
|
|
||||||
[*.md]
|
|
||||||
trim_trailing_whitespace = false
|
|
||||||
|
|
||||||
# Matches the exact files either package.json or .travis.yml
|
|
||||||
[{package.json,.travis.yml}]
|
|
||||||
indent_style = space
|
|
||||||
indent_size = 2
|
|
5
vendor/github.com/go-openapi/runtime/.gitignore
generated
vendored
5
vendor/github.com/go-openapi/runtime/.gitignore
generated
vendored
|
@ -1,5 +0,0 @@
|
||||||
secrets.yml
|
|
||||||
coverage.out
|
|
||||||
*.cov
|
|
||||||
*.out
|
|
||||||
playground
|
|
24
vendor/github.com/go-openapi/runtime/.travis.yml
generated
vendored
24
vendor/github.com/go-openapi/runtime/.travis.yml
generated
vendored
|
@ -1,24 +0,0 @@
|
||||||
after_success:
|
|
||||||
- bash <(curl -s https://codecov.io/bash)
|
|
||||||
go:
|
|
||||||
- '1.9'
|
|
||||||
- 1.10.x
|
|
||||||
- 1.11.x
|
|
||||||
install:
|
|
||||||
- go get -u github.com/axw/gocov/gocov
|
|
||||||
- go get -u gopkg.in/matm/v1/gocov-html
|
|
||||||
- go get -u github.com/cee-dub/go-junit-report
|
|
||||||
- go get -u github.com/stretchr/testify/assert
|
|
||||||
- go get -u gopkg.in/yaml.v2
|
|
||||||
- go get -u github.com/go-openapi/analysis
|
|
||||||
- go get -u github.com/go-openapi/errors
|
|
||||||
- go get -u github.com/go-openapi/loads
|
|
||||||
- go get -u github.com/go-openapi/strfmt
|
|
||||||
- go get -u github.com/go-openapi/validate
|
|
||||||
- go get -u github.com/docker/go-units
|
|
||||||
language: go
|
|
||||||
notifications:
|
|
||||||
slack:
|
|
||||||
secure: EmObnQuM9Mw8J9vpFaKKHqSMN4Wsr/A9+v7ewAD5cEhA0T1P4m7MbJMiJOhxUhj/X+BFh2DamW+P2lT8mybj5wg8wnkQ2BteKA8Tawi6f9PRw2NRheO8tAi8o/npLnlmet0kc93mn+oLuqHw36w4+j5mkOl2FghkfGiUVhwrhkCP7KXQN+3TU87e+/HzQumlJ3nsE+6terVxkH3PmaUTsS5ONaODZfuxFpfb7RsoEl3skHf6d+tr+1nViLxxly7558Nc33C+W1mr0qiEvMLZ+kJ/CpGWBJ6CUJM3jm6hNe2eMuIPwEK2hxZob8c7n22VPap4K6a0bBRoydoDXaba+2sD7Ym6ivDO/DVyL44VeBBLyIiIBylDGQdZH+6SoWm90Qe/i7tnY/T5Ao5igT8f3cfQY1c3EsTfqmlDfrhmACBmwSlgkdVBLTprHL63JMY24LWmh4jhxsmMRZhCL4dze8su1w6pLN/pD1pGHtKYCEVbdTmaM3PblNRFf12XB7qosmQsgUndH4Vq3bTbU0s1pKjeDhRyLvFzvR0TBbo0pDLEoF1A/i5GVFWa7yLZNUDudQERRh7qv/xBl2excIaQ1sV4DSVm7bAE9l6Kp+yeHQJW2uN6Y3X8wu9gB9nv9l5HBze7wh8KE6PyWAOLYYqZg9/sAtsv/2GcQqXcKFF1zcA=
|
|
||||||
script:
|
|
||||||
- ./hack/coverage
|
|
74
vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md
generated
vendored
74
vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md
generated
vendored
|
@ -1,74 +0,0 @@
|
||||||
# Contributor Covenant Code of Conduct
|
|
||||||
|
|
||||||
## Our Pledge
|
|
||||||
|
|
||||||
In the interest of fostering an open and welcoming environment, we as
|
|
||||||
contributors and maintainers pledge to making participation in our project and
|
|
||||||
our community a harassment-free experience for everyone, regardless of age, body
|
|
||||||
size, disability, ethnicity, gender identity and expression, level of experience,
|
|
||||||
nationality, personal appearance, race, religion, or sexual identity and
|
|
||||||
orientation.
|
|
||||||
|
|
||||||
## Our Standards
|
|
||||||
|
|
||||||
Examples of behavior that contributes to creating a positive environment
|
|
||||||
include:
|
|
||||||
|
|
||||||
* Using welcoming and inclusive language
|
|
||||||
* Being respectful of differing viewpoints and experiences
|
|
||||||
* Gracefully accepting constructive criticism
|
|
||||||
* Focusing on what is best for the community
|
|
||||||
* Showing empathy towards other community members
|
|
||||||
|
|
||||||
Examples of unacceptable behavior by participants include:
|
|
||||||
|
|
||||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
|
||||||
advances
|
|
||||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
|
||||||
* Public or private harassment
|
|
||||||
* Publishing others' private information, such as a physical or electronic
|
|
||||||
address, without explicit permission
|
|
||||||
* Other conduct which could reasonably be considered inappropriate in a
|
|
||||||
professional setting
|
|
||||||
|
|
||||||
## Our Responsibilities
|
|
||||||
|
|
||||||
Project maintainers are responsible for clarifying the standards of acceptable
|
|
||||||
behavior and are expected to take appropriate and fair corrective action in
|
|
||||||
response to any instances of unacceptable behavior.
|
|
||||||
|
|
||||||
Project maintainers have the right and responsibility to remove, edit, or
|
|
||||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
|
||||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
|
||||||
permanently any contributor for other behaviors that they deem inappropriate,
|
|
||||||
threatening, offensive, or harmful.
|
|
||||||
|
|
||||||
## Scope
|
|
||||||
|
|
||||||
This Code of Conduct applies both within project spaces and in public spaces
|
|
||||||
when an individual is representing the project or its community. Examples of
|
|
||||||
representing a project or community include using an official project e-mail
|
|
||||||
address, posting via an official social media account, or acting as an appointed
|
|
||||||
representative at an online or offline event. Representation of a project may be
|
|
||||||
further defined and clarified by project maintainers.
|
|
||||||
|
|
||||||
## Enforcement
|
|
||||||
|
|
||||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
|
||||||
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
|
|
||||||
complaints will be reviewed and investigated and will result in a response that
|
|
||||||
is deemed necessary and appropriate to the circumstances. The project team is
|
|
||||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
|
||||||
Further details of specific enforcement policies may be posted separately.
|
|
||||||
|
|
||||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
|
||||||
faith may face temporary or permanent repercussions as determined by other
|
|
||||||
members of the project's leadership.
|
|
||||||
|
|
||||||
## Attribution
|
|
||||||
|
|
||||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
|
||||||
available at [http://contributor-covenant.org/version/1/4][version]
|
|
||||||
|
|
||||||
[homepage]: http://contributor-covenant.org
|
|
||||||
[version]: http://contributor-covenant.org/version/1/4/
|
|
202
vendor/github.com/go-openapi/runtime/LICENSE
generated
vendored
202
vendor/github.com/go-openapi/runtime/LICENSE
generated
vendored
|
@ -1,202 +0,0 @@
|
||||||
|
|
||||||
Apache License
|
|
||||||
Version 2.0, January 2004
|
|
||||||
http://www.apache.org/licenses/
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|
||||||
|
|
||||||
1. Definitions.
|
|
||||||
|
|
||||||
"License" shall mean the terms and conditions for use, reproduction,
|
|
||||||
and distribution as defined by Sections 1 through 9 of this document.
|
|
||||||
|
|
||||||
"Licensor" shall mean the copyright owner or entity authorized by
|
|
||||||
the copyright owner that is granting the License.
|
|
||||||
|
|
||||||
"Legal Entity" shall mean the union of the acting entity and all
|
|
||||||
other entities that control, are controlled by, or are under common
|
|
||||||
control with that entity. For the purposes of this definition,
|
|
||||||
"control" means (i) the power, direct or indirect, to cause the
|
|
||||||
direction or management of such entity, whether by contract or
|
|
||||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
|
||||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
|
||||||
|
|
||||||
"You" (or "Your") shall mean an individual or Legal Entity
|
|
||||||
exercising permissions granted by this License.
|
|
||||||
|
|
||||||
"Source" form shall mean the preferred form for making modifications,
|
|
||||||
including but not limited to software source code, documentation
|
|
||||||
source, and configuration files.
|
|
||||||
|
|
||||||
"Object" form shall mean any form resulting from mechanical
|
|
||||||
transformation or translation of a Source form, including but
|
|
||||||
not limited to compiled object code, generated documentation,
|
|
||||||
and conversions to other media types.
|
|
||||||
|
|
||||||
"Work" shall mean the work of authorship, whether in Source or
|
|
||||||
Object form, made available under the License, as indicated by a
|
|
||||||
copyright notice that is included in or attached to the work
|
|
||||||
(an example is provided in the Appendix below).
|
|
||||||
|
|
||||||
"Derivative Works" shall mean any work, whether in Source or Object
|
|
||||||
form, that is based on (or derived from) the Work and for which the
|
|
||||||
editorial revisions, annotations, elaborations, or other modifications
|
|
||||||
represent, as a whole, an original work of authorship. For the purposes
|
|
||||||
of this License, Derivative Works shall not include works that remain
|
|
||||||
separable from, or merely link (or bind by name) to the interfaces of,
|
|
||||||
the Work and Derivative Works thereof.
|
|
||||||
|
|
||||||
"Contribution" shall mean any work of authorship, including
|
|
||||||
the original version of the Work and any modifications or additions
|
|
||||||
to that Work or Derivative Works thereof, that is intentionally
|
|
||||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
|
||||||
or by an individual or Legal Entity authorized to submit on behalf of
|
|
||||||
the copyright owner. For the purposes of this definition, "submitted"
|
|
||||||
means any form of electronic, verbal, or written communication sent
|
|
||||||
to the Licensor or its representatives, including but not limited to
|
|
||||||
communication on electronic mailing lists, source code control systems,
|
|
||||||
and issue tracking systems that are managed by, or on behalf of, the
|
|
||||||
Licensor for the purpose of discussing and improving the Work, but
|
|
||||||
excluding communication that is conspicuously marked or otherwise
|
|
||||||
designated in writing by the copyright owner as "Not a Contribution."
|
|
||||||
|
|
||||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
|
||||||
on behalf of whom a Contribution has been received by Licensor and
|
|
||||||
subsequently incorporated within the Work.
|
|
||||||
|
|
||||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
copyright license to reproduce, prepare Derivative Works of,
|
|
||||||
publicly display, publicly perform, sublicense, and distribute the
|
|
||||||
Work and such Derivative Works in Source or Object form.
|
|
||||||
|
|
||||||
3. Grant of Patent License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
(except as stated in this section) patent license to make, have made,
|
|
||||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
|
||||||
where such license applies only to those patent claims licensable
|
|
||||||
by such Contributor that are necessarily infringed by their
|
|
||||||
Contribution(s) alone or by combination of their Contribution(s)
|
|
||||||
with the Work to which such Contribution(s) was submitted. If You
|
|
||||||
institute patent litigation against any entity (including a
|
|
||||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
|
||||||
or a Contribution incorporated within the Work constitutes direct
|
|
||||||
or contributory patent infringement, then any patent licenses
|
|
||||||
granted to You under this License for that Work shall terminate
|
|
||||||
as of the date such litigation is filed.
|
|
||||||
|
|
||||||
4. Redistribution. You may reproduce and distribute copies of the
|
|
||||||
Work or Derivative Works thereof in any medium, with or without
|
|
||||||
modifications, and in Source or Object form, provided that You
|
|
||||||
meet the following conditions:
|
|
||||||
|
|
||||||
(a) You must give any other recipients of the Work or
|
|
||||||
Derivative Works a copy of this License; and
|
|
||||||
|
|
||||||
(b) You must cause any modified files to carry prominent notices
|
|
||||||
stating that You changed the files; and
|
|
||||||
|
|
||||||
(c) You must retain, in the Source form of any Derivative Works
|
|
||||||
that You distribute, all copyright, patent, trademark, and
|
|
||||||
attribution notices from the Source form of the Work,
|
|
||||||
excluding those notices that do not pertain to any part of
|
|
||||||
the Derivative Works; and
|
|
||||||
|
|
||||||
(d) If the Work includes a "NOTICE" text file as part of its
|
|
||||||
distribution, then any Derivative Works that You distribute must
|
|
||||||
include a readable copy of the attribution notices contained
|
|
||||||
within such NOTICE file, excluding those notices that do not
|
|
||||||
pertain to any part of the Derivative Works, in at least one
|
|
||||||
of the following places: within a NOTICE text file distributed
|
|
||||||
as part of the Derivative Works; within the Source form or
|
|
||||||
documentation, if provided along with the Derivative Works; or,
|
|
||||||
within a display generated by the Derivative Works, if and
|
|
||||||
wherever such third-party notices normally appear. The contents
|
|
||||||
of the NOTICE file are for informational purposes only and
|
|
||||||
do not modify the License. You may add Your own attribution
|
|
||||||
notices within Derivative Works that You distribute, alongside
|
|
||||||
or as an addendum to the NOTICE text from the Work, provided
|
|
||||||
that such additional attribution notices cannot be construed
|
|
||||||
as modifying the License.
|
|
||||||
|
|
||||||
You may add Your own copyright statement to Your modifications and
|
|
||||||
may provide additional or different license terms and conditions
|
|
||||||
for use, reproduction, or distribution of Your modifications, or
|
|
||||||
for any such Derivative Works as a whole, provided Your use,
|
|
||||||
reproduction, and distribution of the Work otherwise complies with
|
|
||||||
the conditions stated in this License.
|
|
||||||
|
|
||||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
|
||||||
any Contribution intentionally submitted for inclusion in the Work
|
|
||||||
by You to the Licensor shall be under the terms and conditions of
|
|
||||||
this License, without any additional terms or conditions.
|
|
||||||
Notwithstanding the above, nothing herein shall supersede or modify
|
|
||||||
the terms of any separate license agreement you may have executed
|
|
||||||
with Licensor regarding such Contributions.
|
|
||||||
|
|
||||||
6. Trademarks. This License does not grant permission to use the trade
|
|
||||||
names, trademarks, service marks, or product names of the Licensor,
|
|
||||||
except as required for reasonable and customary use in describing the
|
|
||||||
origin of the Work and reproducing the content of the NOTICE file.
|
|
||||||
|
|
||||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
|
||||||
agreed to in writing, Licensor provides the Work (and each
|
|
||||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
implied, including, without limitation, any warranties or conditions
|
|
||||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
|
||||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
|
||||||
appropriateness of using or redistributing the Work and assume any
|
|
||||||
risks associated with Your exercise of permissions under this License.
|
|
||||||
|
|
||||||
8. Limitation of Liability. In no event and under no legal theory,
|
|
||||||
whether in tort (including negligence), contract, or otherwise,
|
|
||||||
unless required by applicable law (such as deliberate and grossly
|
|
||||||
negligent acts) or agreed to in writing, shall any Contributor be
|
|
||||||
liable to You for damages, including any direct, indirect, special,
|
|
||||||
incidental, or consequential damages of any character arising as a
|
|
||||||
result of this License or out of the use or inability to use the
|
|
||||||
Work (including but not limited to damages for loss of goodwill,
|
|
||||||
work stoppage, computer failure or malfunction, or any and all
|
|
||||||
other commercial damages or losses), even if such Contributor
|
|
||||||
has been advised of the possibility of such damages.
|
|
||||||
|
|
||||||
9. Accepting Warranty or Additional Liability. While redistributing
|
|
||||||
the Work or Derivative Works thereof, You may choose to offer,
|
|
||||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
|
||||||
or other liability obligations and/or rights consistent with this
|
|
||||||
License. However, in accepting such obligations, You may act only
|
|
||||||
on Your own behalf and on Your sole responsibility, not on behalf
|
|
||||||
of any other Contributor, and only if You agree to indemnify,
|
|
||||||
defend, and hold each Contributor harmless for any liability
|
|
||||||
incurred by, or claims asserted against, such Contributor by reason
|
|
||||||
of your accepting any such warranty or additional liability.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
APPENDIX: How to apply the Apache License to your work.
|
|
||||||
|
|
||||||
To apply the Apache License to your work, attach the following
|
|
||||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
|
||||||
replaced with your own identifying information. (Don't include
|
|
||||||
the brackets!) The text should be enclosed in the appropriate
|
|
||||||
comment syntax for the file format. We also recommend that a
|
|
||||||
file or class name and description of purpose be included on the
|
|
||||||
same "printed page" as the copyright notice for easier
|
|
||||||
identification within third-party archives.
|
|
||||||
|
|
||||||
Copyright [yyyy] [name of copyright owner]
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
5
vendor/github.com/go-openapi/runtime/README.md
generated
vendored
5
vendor/github.com/go-openapi/runtime/README.md
generated
vendored
|
@ -1,5 +0,0 @@
|
||||||
# runtime [![Build Status](https://travis-ci.org/go-openapi/runtime.svg?branch=client-context)](https://travis-ci.org/go-openapi/runtime) [![codecov](https://codecov.io/gh/go-openapi/runtime/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/runtime) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
|
|
||||||
|
|
||||||
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/runtime/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/runtime?status.svg)](http://godoc.org/github.com/go-openapi/runtime)
|
|
||||||
|
|
||||||
The runtime component for use in codegeneration or as untyped usage.
|
|
151
vendor/github.com/go-openapi/runtime/bytestream.go
generated
vendored
151
vendor/github.com/go-openapi/runtime/bytestream.go
generated
vendored
|
@ -1,151 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package runtime
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"reflect"
|
|
||||||
|
|
||||||
"github.com/go-openapi/swag"
|
|
||||||
)
|
|
||||||
|
|
||||||
func defaultCloser() error { return nil }
|
|
||||||
|
|
||||||
type byteStreamOpt func(opts *byteStreamOpts)
|
|
||||||
|
|
||||||
// ClosesStream when the bytestream consumer or producer is finished
|
|
||||||
func ClosesStream(opts *byteStreamOpts) {
|
|
||||||
opts.Close = true
|
|
||||||
}
|
|
||||||
|
|
||||||
type byteStreamOpts struct {
|
|
||||||
Close bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// ByteStreamConsumer creates a consmer for byte streams,
|
|
||||||
// takes a Writer/BinaryUnmarshaler interface or binary slice by reference,
|
|
||||||
// and reads from the provided reader
|
|
||||||
func ByteStreamConsumer(opts ...byteStreamOpt) Consumer {
|
|
||||||
var vals byteStreamOpts
|
|
||||||
for _, opt := range opts {
|
|
||||||
opt(&vals)
|
|
||||||
}
|
|
||||||
|
|
||||||
return ConsumerFunc(func(reader io.Reader, data interface{}) error {
|
|
||||||
if reader == nil {
|
|
||||||
return errors.New("ByteStreamConsumer requires a reader") // early exit
|
|
||||||
}
|
|
||||||
|
|
||||||
close := defaultCloser
|
|
||||||
if vals.Close {
|
|
||||||
if cl, ok := reader.(io.Closer); ok {
|
|
||||||
close = cl.Close
|
|
||||||
}
|
|
||||||
}
|
|
||||||
defer close()
|
|
||||||
|
|
||||||
if wrtr, ok := data.(io.Writer); ok {
|
|
||||||
_, err := io.Copy(wrtr, reader)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
buf := new(bytes.Buffer)
|
|
||||||
_, err := buf.ReadFrom(reader)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
b := buf.Bytes()
|
|
||||||
|
|
||||||
if bu, ok := data.(encoding.BinaryUnmarshaler); ok {
|
|
||||||
return bu.UnmarshalBinary(b)
|
|
||||||
}
|
|
||||||
|
|
||||||
if t := reflect.TypeOf(data); data != nil && t.Kind() == reflect.Ptr {
|
|
||||||
v := reflect.Indirect(reflect.ValueOf(data))
|
|
||||||
if t = v.Type(); t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 {
|
|
||||||
v.SetBytes(b)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return fmt.Errorf("%v (%T) is not supported by the ByteStreamConsumer, %s",
|
|
||||||
data, data, "can be resolved by supporting Writer/BinaryUnmarshaler interface")
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// ByteStreamProducer creates a producer for byte streams,
|
|
||||||
// takes a Reader/BinaryMarshaler interface or binary slice,
|
|
||||||
// and writes to a writer (essentially a pipe)
|
|
||||||
func ByteStreamProducer(opts ...byteStreamOpt) Producer {
|
|
||||||
var vals byteStreamOpts
|
|
||||||
for _, opt := range opts {
|
|
||||||
opt(&vals)
|
|
||||||
}
|
|
||||||
return ProducerFunc(func(writer io.Writer, data interface{}) error {
|
|
||||||
if writer == nil {
|
|
||||||
return errors.New("ByteStreamProducer requires a writer") // early exit
|
|
||||||
}
|
|
||||||
close := defaultCloser
|
|
||||||
if vals.Close {
|
|
||||||
if cl, ok := writer.(io.Closer); ok {
|
|
||||||
close = cl.Close
|
|
||||||
}
|
|
||||||
}
|
|
||||||
defer close()
|
|
||||||
|
|
||||||
if rdr, ok := data.(io.Reader); ok {
|
|
||||||
_, err := io.Copy(writer, rdr)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if bm, ok := data.(encoding.BinaryMarshaler); ok {
|
|
||||||
bytes, err := bm.MarshalBinary()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = writer.Write(bytes)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if data != nil {
|
|
||||||
if e, ok := data.(error); ok {
|
|
||||||
_, err := writer.Write([]byte(e.Error()))
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
v := reflect.Indirect(reflect.ValueOf(data))
|
|
||||||
if t := v.Type(); t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 {
|
|
||||||
_, err := writer.Write(v.Bytes())
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if t := v.Type(); t.Kind() == reflect.Struct || t.Kind() == reflect.Slice {
|
|
||||||
b, err := swag.WriteJSON(data)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
_, err = writer.Write(b)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return fmt.Errorf("%v (%T) is not supported by the ByteStreamProducer, %s",
|
|
||||||
data, data, "can be resolved by supporting Reader/BinaryMarshaler interface")
|
|
||||||
})
|
|
||||||
}
|
|
30
vendor/github.com/go-openapi/runtime/client_auth_info.go
generated
vendored
30
vendor/github.com/go-openapi/runtime/client_auth_info.go
generated
vendored
|
@ -1,30 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package runtime
|
|
||||||
|
|
||||||
import "github.com/go-openapi/strfmt"
|
|
||||||
|
|
||||||
// A ClientAuthInfoWriterFunc converts a function to a request writer interface
|
|
||||||
type ClientAuthInfoWriterFunc func(ClientRequest, strfmt.Registry) error
|
|
||||||
|
|
||||||
// AuthenticateRequest adds authentication data to the request
|
|
||||||
func (fn ClientAuthInfoWriterFunc) AuthenticateRequest(req ClientRequest, reg strfmt.Registry) error {
|
|
||||||
return fn(req, reg)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A ClientAuthInfoWriter implementor knows how to write authentication info to a request
|
|
||||||
type ClientAuthInfoWriter interface {
|
|
||||||
AuthenticateRequest(ClientRequest, strfmt.Registry) error
|
|
||||||
}
|
|
41
vendor/github.com/go-openapi/runtime/client_operation.go
generated
vendored
41
vendor/github.com/go-openapi/runtime/client_operation.go
generated
vendored
|
@ -1,41 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package runtime
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"net/http"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ClientOperation represents the context for a swagger operation to be submitted to the transport
|
|
||||||
type ClientOperation struct {
|
|
||||||
ID string
|
|
||||||
Method string
|
|
||||||
PathPattern string
|
|
||||||
ProducesMediaTypes []string
|
|
||||||
ConsumesMediaTypes []string
|
|
||||||
Schemes []string
|
|
||||||
AuthInfo ClientAuthInfoWriter
|
|
||||||
Params ClientRequestWriter
|
|
||||||
Reader ClientResponseReader
|
|
||||||
Context context.Context
|
|
||||||
Client *http.Client
|
|
||||||
}
|
|
||||||
|
|
||||||
// A ClientTransport implementor knows how to submit Request objects to some destination
|
|
||||||
type ClientTransport interface {
|
|
||||||
//Submit(string, RequestWriter, ResponseReader, AuthInfoWriter) (interface{}, error)
|
|
||||||
Submit(*ClientOperation) (interface{}, error)
|
|
||||||
}
|
|
100
vendor/github.com/go-openapi/runtime/client_request.go
generated
vendored
100
vendor/github.com/go-openapi/runtime/client_request.go
generated
vendored
|
@ -1,100 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package runtime
|
|
||||||
|
|
||||||
import (
|
|
||||||
"io"
|
|
||||||
"io/ioutil"
|
|
||||||
"net/url"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/go-openapi/strfmt"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ClientRequestWriterFunc converts a function to a request writer interface
|
|
||||||
type ClientRequestWriterFunc func(ClientRequest, strfmt.Registry) error
|
|
||||||
|
|
||||||
// WriteToRequest adds data to the request
|
|
||||||
func (fn ClientRequestWriterFunc) WriteToRequest(req ClientRequest, reg strfmt.Registry) error {
|
|
||||||
return fn(req, reg)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ClientRequestWriter is an interface for things that know how to write to a request
|
|
||||||
type ClientRequestWriter interface {
|
|
||||||
WriteToRequest(ClientRequest, strfmt.Registry) error
|
|
||||||
}
|
|
||||||
|
|
||||||
// ClientRequest is an interface for things that know how to
|
|
||||||
// add information to a swagger client request
|
|
||||||
type ClientRequest interface {
|
|
||||||
SetHeaderParam(string, ...string) error
|
|
||||||
|
|
||||||
SetQueryParam(string, ...string) error
|
|
||||||
|
|
||||||
SetFormParam(string, ...string) error
|
|
||||||
|
|
||||||
SetPathParam(string, string) error
|
|
||||||
|
|
||||||
GetQueryParams() url.Values
|
|
||||||
|
|
||||||
SetFileParam(string, ...NamedReadCloser) error
|
|
||||||
|
|
||||||
SetBodyParam(interface{}) error
|
|
||||||
|
|
||||||
SetTimeout(time.Duration) error
|
|
||||||
|
|
||||||
GetMethod() string
|
|
||||||
|
|
||||||
GetPath() string
|
|
||||||
|
|
||||||
GetBody() []byte
|
|
||||||
|
|
||||||
GetBodyParam() interface{}
|
|
||||||
|
|
||||||
GetFileParam() map[string][]NamedReadCloser
|
|
||||||
}
|
|
||||||
|
|
||||||
// NamedReadCloser represents a named ReadCloser interface
|
|
||||||
type NamedReadCloser interface {
|
|
||||||
io.ReadCloser
|
|
||||||
Name() string
|
|
||||||
}
|
|
||||||
|
|
||||||
// NamedReader creates a NamedReadCloser for use as file upload
|
|
||||||
func NamedReader(name string, rdr io.Reader) NamedReadCloser {
|
|
||||||
rc, ok := rdr.(io.ReadCloser)
|
|
||||||
if !ok {
|
|
||||||
rc = ioutil.NopCloser(rdr)
|
|
||||||
}
|
|
||||||
return &namedReadCloser{
|
|
||||||
name: name,
|
|
||||||
cr: rc,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type namedReadCloser struct {
|
|
||||||
name string
|
|
||||||
cr io.ReadCloser
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *namedReadCloser) Close() error {
|
|
||||||
return n.cr.Close()
|
|
||||||
}
|
|
||||||
func (n *namedReadCloser) Read(p []byte) (int, error) {
|
|
||||||
return n.cr.Read(p)
|
|
||||||
}
|
|
||||||
func (n *namedReadCloser) Name() string {
|
|
||||||
return n.name
|
|
||||||
}
|
|
63
vendor/github.com/go-openapi/runtime/client_response.go
generated
vendored
63
vendor/github.com/go-openapi/runtime/client_response.go
generated
vendored
|
@ -1,63 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package runtime
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A ClientResponse represents a client response
|
|
||||||
// This bridges between responses obtained from different transports
|
|
||||||
type ClientResponse interface {
|
|
||||||
Code() int
|
|
||||||
Message() string
|
|
||||||
GetHeader(string) string
|
|
||||||
Body() io.ReadCloser
|
|
||||||
}
|
|
||||||
|
|
||||||
// A ClientResponseReaderFunc turns a function into a ClientResponseReader interface implementation
|
|
||||||
type ClientResponseReaderFunc func(ClientResponse, Consumer) (interface{}, error)
|
|
||||||
|
|
||||||
// ReadResponse reads the response
|
|
||||||
func (read ClientResponseReaderFunc) ReadResponse(resp ClientResponse, consumer Consumer) (interface{}, error) {
|
|
||||||
return read(resp, consumer)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A ClientResponseReader is an interface for things want to read a response.
|
|
||||||
// An application of this is to create structs from response values
|
|
||||||
type ClientResponseReader interface {
|
|
||||||
ReadResponse(ClientResponse, Consumer) (interface{}, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewAPIError creates a new API error
|
|
||||||
func NewAPIError(opName string, payload interface{}, code int) *APIError {
|
|
||||||
return &APIError{
|
|
||||||
OperationName: opName,
|
|
||||||
Response: payload,
|
|
||||||
Code: code,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// APIError wraps an error model and captures the status code
|
|
||||||
type APIError struct {
|
|
||||||
OperationName string
|
|
||||||
Response interface{}
|
|
||||||
Code int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (a *APIError) Error() string {
|
|
||||||
return fmt.Sprintf("%s (status %d): %+v ", a.OperationName, a.Code, a.Response)
|
|
||||||
}
|
|
45
vendor/github.com/go-openapi/runtime/constants.go
generated
vendored
45
vendor/github.com/go-openapi/runtime/constants.go
generated
vendored
|
@ -1,45 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package runtime
|
|
||||||
|
|
||||||
const (
|
|
||||||
// HeaderContentType represents a http content-type header, it's value is supposed to be a mime type
|
|
||||||
HeaderContentType = "Content-Type"
|
|
||||||
|
|
||||||
// HeaderTransferEncoding represents a http transfer-encoding header.
|
|
||||||
HeaderTransferEncoding = "Transfer-Encoding"
|
|
||||||
|
|
||||||
// HeaderAccept the Accept header
|
|
||||||
HeaderAccept = "Accept"
|
|
||||||
|
|
||||||
charsetKey = "charset"
|
|
||||||
|
|
||||||
// DefaultMime the default fallback mime type
|
|
||||||
DefaultMime = "application/octet-stream"
|
|
||||||
// JSONMime the json mime type
|
|
||||||
JSONMime = "application/json"
|
|
||||||
// YAMLMime the yaml mime type
|
|
||||||
YAMLMime = "application/x-yaml"
|
|
||||||
// XMLMime the xml mime type
|
|
||||||
XMLMime = "application/xml"
|
|
||||||
// TextMime the text mime type
|
|
||||||
TextMime = "text/plain"
|
|
||||||
// HTMLMime the html mime type
|
|
||||||
HTMLMime = "text/html"
|
|
||||||
// MultipartFormMime the multipart form mime type
|
|
||||||
MultipartFormMime = "multipart/form-data"
|
|
||||||
// URLencodedFormMime the url encoded form mime type
|
|
||||||
URLencodedFormMime = "application/x-www-form-urlencoded"
|
|
||||||
)
|
|
9
vendor/github.com/go-openapi/runtime/discard.go
generated
vendored
9
vendor/github.com/go-openapi/runtime/discard.go
generated
vendored
|
@ -1,9 +0,0 @@
|
||||||
package runtime
|
|
||||||
|
|
||||||
import "io"
|
|
||||||
|
|
||||||
// DiscardConsumer does absolutely nothing, it's a black hole.
|
|
||||||
var DiscardConsumer = ConsumerFunc(func(_ io.Reader, _ interface{}) error { return nil })
|
|
||||||
|
|
||||||
// DiscardProducer does absolutely nothing, it's a black hole.
|
|
||||||
var DiscardProducer = ProducerFunc(func(_ io.Writer, _ interface{}) error { return nil })
|
|
33
vendor/github.com/go-openapi/runtime/file.go
generated
vendored
33
vendor/github.com/go-openapi/runtime/file.go
generated
vendored
|
@ -1,33 +0,0 @@
|
||||||
// Copyright 2015 go-swagger maintainers
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package runtime
|
|
||||||
|
|
||||||
import "mime/multipart"
|
|
||||||
|
|
||||||
// File represents an uploaded file.
|
|
||||||
type File struct {
|
|
||||||
Data multipart.File
|
|
||||||
Header *multipart.FileHeader
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read bytes from the file
|
|
||||||
func (f *File) Read(p []byte) (n int, err error) {
|
|
||||||
return f.Data.Read(p)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Close the file
|
|
||||||
func (f *File) Close() error {
|
|
||||||
return f.Data.Close()
|
|
||||||
}
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue