diff --git a/go.mod b/go.mod
index 84635b4..ce3f6ec 100644
--- a/go.mod
+++ b/go.mod
@@ -3,10 +3,13 @@ module github.com/yannh/kubeconform
go 1.17
require (
- github.com/beevik/etree v1.1.0
- github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
- github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
+ github.com/santhosh-tekuri/jsonschema/v5 v5.1.1
github.com/xeipuuv/gojsonschema v1.2.0
- gopkg.in/yaml.v2 v2.4.0 // indirect
sigs.k8s.io/yaml v1.2.0
)
+
+require (
+ github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
+ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
+ gopkg.in/yaml.v2 v2.4.0 // indirect
+)
diff --git a/go.sum b/go.sum
index 456e485..a0a698b 100644
--- a/go.sum
+++ b/go.sum
@@ -1,10 +1,10 @@
-github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs=
-github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/santhosh-tekuri/jsonschema/v5 v5.1.1 h1:lEOLY2vyGIqKWUI9nzsOJRV3mb3WC9dXYORsLEUcoeY=
+github.com/santhosh-tekuri/jsonschema/v5 v5.1.1/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
diff --git a/pkg/validator/validator.go b/pkg/validator/validator.go
index 09eeb53..a4ca087 100644
--- a/pkg/validator/validator.go
+++ b/pkg/validator/validator.go
@@ -6,11 +6,10 @@ import (
"fmt"
"io"
+ jsonschema "github.com/santhosh-tekuri/jsonschema/v5"
"github.com/yannh/kubeconform/pkg/cache"
"github.com/yannh/kubeconform/pkg/registry"
"github.com/yannh/kubeconform/pkg/resource"
-
- "github.com/xeipuuv/gojsonschema"
"sigs.k8s.io/yaml"
)
@@ -91,7 +90,7 @@ func New(schemaLocations []string, opts Opts) (Validator, error) {
type v struct {
opts Opts
schemaCache cache.Cache
- schemaDownload func(registries []registry.Registry, kind, version, k8sVersion string) (*gojsonschema.Schema, error)
+ schemaDownload func(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error)
regs []registry.Registry
}
@@ -151,13 +150,13 @@ func (val *v) ValidateResource(res resource.Resource) Result {
}
cached := false
- var schema *gojsonschema.Schema
+ var schema *jsonschema.Schema
if val.schemaCache != nil {
s, err := val.schemaCache.Get(sig.Kind, sig.Version, val.opts.KubernetesVersion)
if err == nil {
cached = true
- schema = s.(*gojsonschema.Schema)
+ schema = s.(*jsonschema.Schema)
}
}
@@ -179,28 +178,13 @@ func (val *v) ValidateResource(res resource.Resource) Result {
return Result{Resource: res, Err: fmt.Errorf("could not find schema for %s", sig.Kind), Status: Error}
}
- resourceLoader := gojsonschema.NewGoLoader(r)
-
- results, err := schema.Validate(resourceLoader)
+	if err = schema.Validate(r); err != nil { if _, ok := err.(*jsonschema.ValidationError); ok { return Result{Resource: res, Status: Invalid, Err: err} } }
if err != nil {
// This error can only happen if the Object to validate is poorly formed. There's no hope of saving this one
return Result{Resource: res, Status: Error, Err: fmt.Errorf("problem validating schema. Check JSON formatting: %s", err)}
}
- if results.Valid() {
- return Result{Resource: res, Status: Valid}
- }
-
- msg := ""
- for _, errMsg := range results.Errors() {
- if msg != "" {
- msg += " - "
- }
- details := errMsg.Details()
- msg += fmt.Sprintf("For field %s: %s", details["field"].(string), errMsg.Description())
- }
-
- return Result{Resource: res, Status: Invalid, Err: fmt.Errorf("%s", msg)}
+ return Result{Resource: res, Status: Valid}
}
// ValidateWithContext validates resources found in r
@@ -235,17 +219,17 @@ func (val *v) Validate(filename string, r io.ReadCloser) []Result {
return val.ValidateWithContext(context.Background(), filename, r)
}
-func downloadSchema(registries []registry.Registry, kind, version, k8sVersion string) (*gojsonschema.Schema, error) {
+func downloadSchema(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error) {
var err error
var schemaBytes []byte
for _, reg := range registries {
schemaBytes, err = reg.DownloadSchema(kind, version, k8sVersion)
if err == nil {
- schema, err := gojsonschema.NewSchema(gojsonschema.NewBytesLoader(schemaBytes))
-
+ schema, err := jsonschema.CompileString(fmt.Sprintf("%s%s%s", kind, version, k8sVersion), string(schemaBytes))
// If we got a non-parseable response, we try the next registry
if err != nil {
+				// compile error is intentionally dropped; a later registry may serve a parseable schema
continue
}
return schema, err
diff --git a/vendor/github.com/beevik/etree/.travis.yml b/vendor/github.com/beevik/etree/.travis.yml
deleted file mode 100644
index f4cb25d..0000000
--- a/vendor/github.com/beevik/etree/.travis.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-language: go
-sudo: false
-
-go:
- - 1.11.x
- - tip
-
-matrix:
- allow_failures:
- - go: tip
-
-script:
- - go vet ./...
- - go test -v ./...
diff --git a/vendor/github.com/beevik/etree/CONTRIBUTORS b/vendor/github.com/beevik/etree/CONTRIBUTORS
deleted file mode 100644
index 03211a8..0000000
--- a/vendor/github.com/beevik/etree/CONTRIBUTORS
+++ /dev/null
@@ -1,10 +0,0 @@
-Brett Vickers (beevik)
-Felix Geisendörfer (felixge)
-Kamil Kisiel (kisielk)
-Graham King (grahamking)
-Matt Smith (ma314smith)
-Michal Jemala (michaljemala)
-Nicolas Piganeau (npiganeau)
-Chris Brown (ccbrown)
-Earncef Sequeira (earncef)
-Gabriel de Labachelerie (wuzuf)
diff --git a/vendor/github.com/beevik/etree/LICENSE b/vendor/github.com/beevik/etree/LICENSE
deleted file mode 100644
index 26f1f77..0000000
--- a/vendor/github.com/beevik/etree/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright 2015-2019 Brett Vickers. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDER ``AS IS'' AND ANY
-EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
-OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/beevik/etree/README.md b/vendor/github.com/beevik/etree/README.md
deleted file mode 100644
index 08ec26b..0000000
--- a/vendor/github.com/beevik/etree/README.md
+++ /dev/null
@@ -1,205 +0,0 @@
-[](https://travis-ci.org/beevik/etree)
-[](https://godoc.org/github.com/beevik/etree)
-
-etree
-=====
-
-The etree package is a lightweight, pure go package that expresses XML in
-the form of an element tree. Its design was inspired by the Python
-[ElementTree](http://docs.python.org/2/library/xml.etree.elementtree.html)
-module.
-
-Some of the package's capabilities and features:
-
-* Represents XML documents as trees of elements for easy traversal.
-* Imports, serializes, modifies or creates XML documents from scratch.
-* Writes and reads XML to/from files, byte slices, strings and io interfaces.
-* Performs simple or complex searches with lightweight XPath-like query APIs.
-* Auto-indents XML using spaces or tabs for better readability.
-* Implemented in pure go; depends only on standard go libraries.
-* Built on top of the go [encoding/xml](http://golang.org/pkg/encoding/xml)
- package.
-
-### Creating an XML document
-
-The following example creates an XML document from scratch using the etree
-package and outputs its indented contents to stdout.
-```go
-doc := etree.NewDocument()
-doc.CreateProcInst("xml", `version="1.0" encoding="UTF-8"`)
-doc.CreateProcInst("xml-stylesheet", `type="text/xsl" href="style.xsl"`)
-
-people := doc.CreateElement("People")
-people.CreateComment("These are all known people")
-
-jon := people.CreateElement("Person")
-jon.CreateAttr("name", "Jon")
-
-sally := people.CreateElement("Person")
-sally.CreateAttr("name", "Sally")
-
-doc.Indent(2)
-doc.WriteTo(os.Stdout)
-```
-
-Output:
-```xml
-
-
-
-
-
-
-
-```
-
-### Reading an XML file
-
-Suppose you have a file on disk called `bookstore.xml` containing the
-following data:
-
-```xml
-
-
-
- Everyday Italian
- Giada De Laurentiis
- 2005
- 30.00
-
-
-
- Harry Potter
- J K. Rowling
- 2005
- 29.99
-
-
-
- XQuery Kick Start
- James McGovern
- Per Bothner
- Kurt Cagle
- James Linn
- Vaidyanathan Nagarajan
- 2003
- 49.99
-
-
-
- Learning XML
- Erik T. Ray
- 2003
- 39.95
-
-
-
-```
-
-This code reads the file's contents into an etree document.
-```go
-doc := etree.NewDocument()
-if err := doc.ReadFromFile("bookstore.xml"); err != nil {
- panic(err)
-}
-```
-
-You can also read XML from a string, a byte slice, or an `io.Reader`.
-
-### Processing elements and attributes
-
-This example illustrates several ways to access elements and attributes using
-etree selection queries.
-```go
-root := doc.SelectElement("bookstore")
-fmt.Println("ROOT element:", root.Tag)
-
-for _, book := range root.SelectElements("book") {
- fmt.Println("CHILD element:", book.Tag)
- if title := book.SelectElement("title"); title != nil {
- lang := title.SelectAttrValue("lang", "unknown")
- fmt.Printf(" TITLE: %s (%s)\n", title.Text(), lang)
- }
- for _, attr := range book.Attr {
- fmt.Printf(" ATTR: %s=%s\n", attr.Key, attr.Value)
- }
-}
-```
-Output:
-```
-ROOT element: bookstore
-CHILD element: book
- TITLE: Everyday Italian (en)
- ATTR: category=COOKING
-CHILD element: book
- TITLE: Harry Potter (en)
- ATTR: category=CHILDREN
-CHILD element: book
- TITLE: XQuery Kick Start (en)
- ATTR: category=WEB
-CHILD element: book
- TITLE: Learning XML (en)
- ATTR: category=WEB
-```
-
-### Path queries
-
-This example uses etree's path functions to select all book titles that fall
-into the category of 'WEB'. The double-slash prefix in the path causes the
-search for book elements to occur recursively; book elements may appear at any
-level of the XML hierarchy.
-```go
-for _, t := range doc.FindElements("//book[@category='WEB']/title") {
- fmt.Println("Title:", t.Text())
-}
-```
-
-Output:
-```
-Title: XQuery Kick Start
-Title: Learning XML
-```
-
-This example finds the first book element under the root bookstore element and
-outputs the tag and text of each of its child elements.
-```go
-for _, e := range doc.FindElements("./bookstore/book[1]/*") {
- fmt.Printf("%s: %s\n", e.Tag, e.Text())
-}
-```
-
-Output:
-```
-title: Everyday Italian
-author: Giada De Laurentiis
-year: 2005
-price: 30.00
-```
-
-This example finds all books with a price of 49.99 and outputs their titles.
-```go
-path := etree.MustCompilePath("./bookstore/book[p:price='49.99']/title")
-for _, e := range doc.FindElementsPath(path) {
- fmt.Println(e.Text())
-}
-```
-
-Output:
-```
-XQuery Kick Start
-```
-
-Note that this example uses the FindElementsPath function, which takes as an
-argument a pre-compiled path object. Use precompiled paths when you plan to
-search with the same path more than once.
-
-### Other features
-
-These are just a few examples of the things the etree package can do. See the
-[documentation](http://godoc.org/github.com/beevik/etree) for a complete
-description of its capabilities.
-
-### Contributing
-
-This project accepts contributions. Just fork the repo and submit a pull
-request!
diff --git a/vendor/github.com/beevik/etree/RELEASE_NOTES.md b/vendor/github.com/beevik/etree/RELEASE_NOTES.md
deleted file mode 100644
index ee59d7a..0000000
--- a/vendor/github.com/beevik/etree/RELEASE_NOTES.md
+++ /dev/null
@@ -1,109 +0,0 @@
-Release v1.1.0
-==============
-
-**New Features**
-
-* New attribute helpers.
- * Added the `Element.SortAttrs` method, which lexicographically sorts an
- element's attributes by key.
-* New `ReadSettings` properties.
- * Added `Entity` for the support of custom entity maps.
-* New `WriteSettings` properties.
- * Added `UseCRLF` to allow the output of CR-LF newlines instead of the
- default LF newlines. This is useful on Windows systems.
-* Additional support for text and CDATA sections.
- * The `Element.Text` method now returns the concatenation of all consecutive
- character data tokens immediately following an element's opening tag.
- * Added `Element.SetCData` to replace the character data immediately
- following an element's opening tag with a CDATA section.
- * Added `Element.CreateCData` to create and add a CDATA section child
- `CharData` token to an element.
- * Added `Element.CreateText` to create and add a child text `CharData` token
- to an element.
- * Added `NewCData` to create a parentless CDATA section `CharData` token.
- * Added `NewText` to create a parentless text `CharData`
- token.
- * Added `CharData.IsCData` to detect if the token contains a CDATA section.
- * Added `CharData.IsWhitespace` to detect if the token contains whitespace
- inserted by one of the document Indent functions.
- * Modified `Element.SetText` so that it replaces a run of consecutive
- character data tokens following the element's opening tag (instead of just
- the first one).
-* New "tail text" support.
- * Added the `Element.Tail` method, which returns the text immediately
- following an element's closing tag.
- * Added the `Element.SetTail` method, which modifies the text immediately
- following an element's closing tag.
-* New element child insertion and removal methods.
- * Added the `Element.InsertChildAt` method, which inserts a new child token
- before the specified child token index.
- * Added the `Element.RemoveChildAt` method, which removes the child token at
- the specified child token index.
-* New element and attribute queries.
- * Added the `Element.Index` method, which returns the element's index within
- its parent element's child token list.
- * Added the `Element.NamespaceURI` method to return the namespace URI
- associated with an element.
- * Added the `Attr.NamespaceURI` method to return the namespace URI
- associated with an element.
- * Added the `Attr.Element` method to return the element that an attribute
- belongs to.
-* New Path filter functions.
- * Added `[local-name()='val']` to keep elements whose unprefixed tag matches
- the desired value.
- * Added `[name()='val']` to keep elements whose full tag matches the desired
- value.
- * Added `[namespace-prefix()='val']` to keep elements whose namespace prefix
- matches the desired value.
- * Added `[namespace-uri()='val']` to keep elements whose namespace URI
- matches the desired value.
-
-**Bug Fixes**
-
-* A default XML `CharSetReader` is now used to prevent failed parsing of XML
- documents using certain encodings.
- ([Issue](https://github.com/beevik/etree/issues/53)).
-* All characters are now properly escaped according to XML parsing rules.
- ([Issue](https://github.com/beevik/etree/issues/55)).
-* The `Document.Indent` and `Document.IndentTabs` functions no longer insert
- empty string `CharData` tokens.
-
-**Deprecated**
-
-* `Element`
- * The `InsertChild` method is deprecated. Use `InsertChildAt` instead.
- * The `CreateCharData` method is deprecated. Use `CreateText` instead.
-* `CharData`
- * The `NewCharData` method is deprecated. Use `NewText` instead.
-
-
-Release v1.0.1
-==============
-
-**Changes**
-
-* Added support for absolute etree Path queries. An absolute path begins with
- `/` or `//` and begins its search from the element's document root.
-* Added [`GetPath`](https://godoc.org/github.com/beevik/etree#Element.GetPath)
- and [`GetRelativePath`](https://godoc.org/github.com/beevik/etree#Element.GetRelativePath)
- functions to the [`Element`](https://godoc.org/github.com/beevik/etree#Element)
- type.
-
-**Breaking changes**
-
-* A path starting with `//` is now interpreted as an absolute path.
- Previously, it was interpreted as a relative path starting from the element
- whose
- [`FindElement`](https://godoc.org/github.com/beevik/etree#Element.FindElement)
- method was called. To remain compatible with this release, all paths
- prefixed with `//` should be prefixed with `.//` when called from any
- element other than the document's root.
-* [**edit 2/1/2019**]: Minor releases should not contain breaking changes.
- Even though this breaking change was very minor, it was a mistake to include
- it in this minor release. In the future, all breaking changes will be
- limited to major releases (e.g., version 2.0.0).
-
-Release v1.0.0
-==============
-
-Initial release.
diff --git a/vendor/github.com/beevik/etree/etree.go b/vendor/github.com/beevik/etree/etree.go
deleted file mode 100644
index 9e24f90..0000000
--- a/vendor/github.com/beevik/etree/etree.go
+++ /dev/null
@@ -1,1453 +0,0 @@
-// Copyright 2015-2019 Brett Vickers.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package etree provides XML services through an Element Tree
-// abstraction.
-package etree
-
-import (
- "bufio"
- "bytes"
- "encoding/xml"
- "errors"
- "io"
- "os"
- "sort"
- "strings"
-)
-
-const (
- // NoIndent is used with Indent to disable all indenting.
- NoIndent = -1
-)
-
-// ErrXML is returned when XML parsing fails due to incorrect formatting.
-var ErrXML = errors.New("etree: invalid XML format")
-
-// ReadSettings allow for changing the default behavior of the ReadFrom*
-// methods.
-type ReadSettings struct {
- // CharsetReader to be passed to standard xml.Decoder. Default: nil.
- CharsetReader func(charset string, input io.Reader) (io.Reader, error)
-
- // Permissive allows input containing common mistakes such as missing tags
- // or attribute values. Default: false.
- Permissive bool
-
- // Entity to be passed to standard xml.Decoder. Default: nil.
- Entity map[string]string
-}
-
-// newReadSettings creates a default ReadSettings record.
-func newReadSettings() ReadSettings {
- return ReadSettings{
- CharsetReader: func(label string, input io.Reader) (io.Reader, error) {
- return input, nil
- },
- Permissive: false,
- }
-}
-
-// WriteSettings allow for changing the serialization behavior of the WriteTo*
-// methods.
-type WriteSettings struct {
- // CanonicalEndTags forces the production of XML end tags, even for
- // elements that have no child elements. Default: false.
- CanonicalEndTags bool
-
- // CanonicalText forces the production of XML character references for
- // text data characters &, <, and >. If false, XML character references
- // are also produced for " and '. Default: false.
- CanonicalText bool
-
- // CanonicalAttrVal forces the production of XML character references for
- // attribute value characters &, < and ". If false, XML character
- // references are also produced for > and '. Default: false.
- CanonicalAttrVal bool
-
- // When outputting indented XML, use a carriage return and linefeed
- // ("\r\n") as a new-line delimiter instead of just a linefeed ("\n").
- // This is useful on Windows-based systems.
- UseCRLF bool
-}
-
-// newWriteSettings creates a default WriteSettings record.
-func newWriteSettings() WriteSettings {
- return WriteSettings{
- CanonicalEndTags: false,
- CanonicalText: false,
- CanonicalAttrVal: false,
- UseCRLF: false,
- }
-}
-
-// A Token is an empty interface that represents an Element, CharData,
-// Comment, Directive, or ProcInst.
-type Token interface {
- Parent() *Element
- Index() int
- dup(parent *Element) Token
- setParent(parent *Element)
- setIndex(index int)
- writeTo(w *bufio.Writer, s *WriteSettings)
-}
-
-// A Document is a container holding a complete XML hierarchy. Its embedded
-// element contains zero or more children, one of which is usually the root
-// element. The embedded element may include other children such as
-// processing instructions or BOM CharData tokens.
-type Document struct {
- Element
- ReadSettings ReadSettings
- WriteSettings WriteSettings
-}
-
-// An Element represents an XML element, its attributes, and its child tokens.
-type Element struct {
- Space, Tag string // namespace prefix and tag
- Attr []Attr // key-value attribute pairs
- Child []Token // child tokens (elements, comments, etc.)
- parent *Element // parent element
- index int // token index in parent's children
-}
-
-// An Attr represents a key-value attribute of an XML element.
-type Attr struct {
- Space, Key string // The attribute's namespace prefix and key
- Value string // The attribute value string
- element *Element // element containing the attribute
-}
-
-// charDataFlags are used with CharData tokens to store additional settings.
-type charDataFlags uint8
-
-const (
- // The CharData was created by an indent function as whitespace.
- whitespaceFlag charDataFlags = 1 << iota
-
- // The CharData contains a CDATA section.
- cdataFlag
-)
-
-// CharData can be used to represent character data or a CDATA section within
-// an XML document.
-type CharData struct {
- Data string
- parent *Element
- index int
- flags charDataFlags
-}
-
-// A Comment represents an XML comment.
-type Comment struct {
- Data string
- parent *Element
- index int
-}
-
-// A Directive represents an XML directive.
-type Directive struct {
- Data string
- parent *Element
- index int
-}
-
-// A ProcInst represents an XML processing instruction.
-type ProcInst struct {
- Target string
- Inst string
- parent *Element
- index int
-}
-
-// NewDocument creates an XML document without a root element.
-func NewDocument() *Document {
- return &Document{
- Element{Child: make([]Token, 0)},
- newReadSettings(),
- newWriteSettings(),
- }
-}
-
-// Copy returns a recursive, deep copy of the document.
-func (d *Document) Copy() *Document {
- return &Document{*(d.dup(nil).(*Element)), d.ReadSettings, d.WriteSettings}
-}
-
-// Root returns the root element of the document, or nil if there is no root
-// element.
-func (d *Document) Root() *Element {
- for _, t := range d.Child {
- if c, ok := t.(*Element); ok {
- return c
- }
- }
- return nil
-}
-
-// SetRoot replaces the document's root element with e. If the document
-// already has a root when this function is called, then the document's
-// original root is unbound first. If the element e is bound to another
-// document (or to another element within a document), then it is unbound
-// first.
-func (d *Document) SetRoot(e *Element) {
- if e.parent != nil {
- e.parent.RemoveChild(e)
- }
-
- p := &d.Element
- e.setParent(p)
-
- // If there is already a root element, replace it.
- for i, t := range p.Child {
- if _, ok := t.(*Element); ok {
- t.setParent(nil)
- t.setIndex(-1)
- p.Child[i] = e
- e.setIndex(i)
- return
- }
- }
-
- // No existing root element, so add it.
- p.addChild(e)
-}
-
-// ReadFrom reads XML from the reader r into the document d. It returns the
-// number of bytes read and any error encountered.
-func (d *Document) ReadFrom(r io.Reader) (n int64, err error) {
- return d.Element.readFrom(r, d.ReadSettings)
-}
-
-// ReadFromFile reads XML from the string s into the document d.
-func (d *Document) ReadFromFile(filename string) error {
- f, err := os.Open(filename)
- if err != nil {
- return err
- }
- defer f.Close()
- _, err = d.ReadFrom(f)
- return err
-}
-
-// ReadFromBytes reads XML from the byte slice b into the document d.
-func (d *Document) ReadFromBytes(b []byte) error {
- _, err := d.ReadFrom(bytes.NewReader(b))
- return err
-}
-
-// ReadFromString reads XML from the string s into the document d.
-func (d *Document) ReadFromString(s string) error {
- _, err := d.ReadFrom(strings.NewReader(s))
- return err
-}
-
-// WriteTo serializes an XML document into the writer w. It
-// returns the number of bytes written and any error encountered.
-func (d *Document) WriteTo(w io.Writer) (n int64, err error) {
- cw := newCountWriter(w)
- b := bufio.NewWriter(cw)
- for _, c := range d.Child {
- c.writeTo(b, &d.WriteSettings)
- }
- err, n = b.Flush(), cw.bytes
- return
-}
-
-// WriteToFile serializes an XML document into the file named
-// filename.
-func (d *Document) WriteToFile(filename string) error {
- f, err := os.Create(filename)
- if err != nil {
- return err
- }
- defer f.Close()
- _, err = d.WriteTo(f)
- return err
-}
-
-// WriteToBytes serializes the XML document into a slice of
-// bytes.
-func (d *Document) WriteToBytes() (b []byte, err error) {
- var buf bytes.Buffer
- if _, err = d.WriteTo(&buf); err != nil {
- return
- }
- return buf.Bytes(), nil
-}
-
-// WriteToString serializes the XML document into a string.
-func (d *Document) WriteToString() (s string, err error) {
- var b []byte
- if b, err = d.WriteToBytes(); err != nil {
- return
- }
- return string(b), nil
-}
-
-type indentFunc func(depth int) string
-
-// Indent modifies the document's element tree by inserting character data
-// tokens containing newlines and indentation. The amount of indentation per
-// depth level is given as spaces. Pass etree.NoIndent for spaces if you want
-// no indentation at all.
-func (d *Document) Indent(spaces int) {
- var indent indentFunc
- switch {
- case spaces < 0:
- indent = func(depth int) string { return "" }
- case d.WriteSettings.UseCRLF == true:
- indent = func(depth int) string { return indentCRLF(depth*spaces, indentSpaces) }
- default:
- indent = func(depth int) string { return indentLF(depth*spaces, indentSpaces) }
- }
- d.Element.indent(0, indent)
-}
-
-// IndentTabs modifies the document's element tree by inserting CharData
-// tokens containing newlines and tabs for indentation. One tab is used per
-// indentation level.
-func (d *Document) IndentTabs() {
- var indent indentFunc
- switch d.WriteSettings.UseCRLF {
- case true:
- indent = func(depth int) string { return indentCRLF(depth, indentTabs) }
- default:
- indent = func(depth int) string { return indentLF(depth, indentTabs) }
- }
- d.Element.indent(0, indent)
-}
-
-// NewElement creates an unparented element with the specified tag. The tag
-// may be prefixed by a namespace prefix and a colon.
-func NewElement(tag string) *Element {
- space, stag := spaceDecompose(tag)
- return newElement(space, stag, nil)
-}
-
-// newElement is a helper function that creates an element and binds it to
-// a parent element if possible.
-func newElement(space, tag string, parent *Element) *Element {
- e := &Element{
- Space: space,
- Tag: tag,
- Attr: make([]Attr, 0),
- Child: make([]Token, 0),
- parent: parent,
- index: -1,
- }
- if parent != nil {
- parent.addChild(e)
- }
- return e
-}
-
-// Copy creates a recursive, deep copy of the element and all its attributes
-// and children. The returned element has no parent but can be parented to a
-// another element using AddElement, or to a document using SetRoot.
-func (e *Element) Copy() *Element {
- return e.dup(nil).(*Element)
-}
-
-// FullTag returns the element e's complete tag, including namespace prefix if
-// present.
-func (e *Element) FullTag() string {
- if e.Space == "" {
- return e.Tag
- }
- return e.Space + ":" + e.Tag
-}
-
-// NamespaceURI returns the XML namespace URI associated with the element. If
-// the element is part of the XML default namespace, NamespaceURI returns the
-// empty string.
-func (e *Element) NamespaceURI() string {
- if e.Space == "" {
- return e.findDefaultNamespaceURI()
- }
- return e.findLocalNamespaceURI(e.Space)
-}
-
-// findLocalNamespaceURI finds the namespace URI corresponding to the
-// requested prefix.
-func (e *Element) findLocalNamespaceURI(prefix string) string {
- for _, a := range e.Attr {
- if a.Space == "xmlns" && a.Key == prefix {
- return a.Value
- }
- }
-
- if e.parent == nil {
- return ""
- }
-
- return e.parent.findLocalNamespaceURI(prefix)
-}
-
-// findDefaultNamespaceURI finds the default namespace URI of the element.
-func (e *Element) findDefaultNamespaceURI() string {
- for _, a := range e.Attr {
- if a.Space == "" && a.Key == "xmlns" {
- return a.Value
- }
- }
-
- if e.parent == nil {
- return ""
- }
-
- return e.parent.findDefaultNamespaceURI()
-}
-
-// hasText returns true if the element has character data immediately
-// folllowing the element's opening tag.
-func (e *Element) hasText() bool {
- if len(e.Child) == 0 {
- return false
- }
- _, ok := e.Child[0].(*CharData)
- return ok
-}
-
-// namespacePrefix returns the namespace prefix associated with the element.
-func (e *Element) namespacePrefix() string {
- return e.Space
-}
-
-// name returns the tag associated with the element.
-func (e *Element) name() string {
- return e.Tag
-}
-
-// Text returns all character data immediately following the element's opening
-// tag.
-func (e *Element) Text() string {
- if len(e.Child) == 0 {
- return ""
- }
-
- text := ""
- for _, ch := range e.Child {
- if cd, ok := ch.(*CharData); ok {
- if text == "" {
- text = cd.Data
- } else {
- text = text + cd.Data
- }
- } else {
- break
- }
- }
- return text
-}
-
-// SetText replaces all character data immediately following an element's
-// opening tag with the requested string.
-func (e *Element) SetText(text string) {
- e.replaceText(0, text, 0)
-}
-
-// SetCData replaces all character data immediately following an element's
-// opening tag with a CDATA section.
-func (e *Element) SetCData(text string) {
- e.replaceText(0, text, cdataFlag)
-}
-
-// Tail returns all character data immediately following the element's end
-// tag.
-func (e *Element) Tail() string {
- if e.Parent() == nil {
- return ""
- }
-
- p := e.Parent()
- i := e.Index()
-
- text := ""
- for _, ch := range p.Child[i+1:] {
- if cd, ok := ch.(*CharData); ok {
- if text == "" {
- text = cd.Data
- } else {
- text = text + cd.Data
- }
- } else {
- break
- }
- }
- return text
-}
-
-// SetTail replaces all character data immediately following the element's end
-// tag with the requested string.
-func (e *Element) SetTail(text string) {
- if e.Parent() == nil {
- return
- }
-
- p := e.Parent()
- p.replaceText(e.Index()+1, text, 0)
-}
-
-// replaceText is a helper function that replaces a series of chardata tokens
-// starting at index i with the requested text.
-func (e *Element) replaceText(i int, text string, flags charDataFlags) {
- end := e.findTermCharDataIndex(i)
-
- switch {
- case end == i:
- if text != "" {
- // insert a new chardata token at index i
- cd := newCharData(text, flags, nil)
- e.InsertChildAt(i, cd)
- }
-
- case end == i+1:
- if text == "" {
- // remove the chardata token at index i
- e.RemoveChildAt(i)
- } else {
- // replace the first and only character token at index i
- cd := e.Child[i].(*CharData)
- cd.Data, cd.flags = text, flags
- }
-
- default:
- if text == "" {
- // remove all chardata tokens starting from index i
- copy(e.Child[i:], e.Child[end:])
- removed := end - i
- e.Child = e.Child[:len(e.Child)-removed]
- for j := i; j < len(e.Child); j++ {
- e.Child[j].setIndex(j)
- }
- } else {
- // replace the first chardata token at index i and remove all
- // subsequent chardata tokens
- cd := e.Child[i].(*CharData)
- cd.Data, cd.flags = text, flags
- copy(e.Child[i+1:], e.Child[end:])
- removed := end - (i + 1)
- e.Child = e.Child[:len(e.Child)-removed]
- for j := i + 1; j < len(e.Child); j++ {
- e.Child[j].setIndex(j)
- }
- }
- }
-}
-
-// findTermCharDataIndex finds the index of the first child token that isn't
-// a CharData token. It starts from the requested start index.
-func (e *Element) findTermCharDataIndex(start int) int {
- for i := start; i < len(e.Child); i++ {
- if _, ok := e.Child[i].(*CharData); !ok {
- return i
- }
- }
- return len(e.Child)
-}
-
-// CreateElement creates an element with the specified tag and adds it as the
-// last child element of the element e. The tag may be prefixed by a namespace
-// prefix and a colon.
-func (e *Element) CreateElement(tag string) *Element {
- space, stag := spaceDecompose(tag)
- return newElement(space, stag, e)
-}
-
-// AddChild adds the token t as the last child of element e. If token t was
-// already the child of another element, it is first removed from its current
-// parent element.
-func (e *Element) AddChild(t Token) {
- if t.Parent() != nil {
- t.Parent().RemoveChild(t)
- }
-
- t.setParent(e)
- e.addChild(t)
-}
-
-// InsertChild inserts the token t before e's existing child token ex. If ex
-// is nil or ex is not a child of e, then t is added to the end of e's child
-// token list. If token t was already the child of another element, it is
-// first removed from its current parent element.
-//
-// Deprecated: InsertChild is deprecated. Use InsertChildAt instead.
-func (e *Element) InsertChild(ex Token, t Token) {
- if ex == nil || ex.Parent() != e {
- e.AddChild(t)
- return
- }
-
- if t.Parent() != nil {
- t.Parent().RemoveChild(t)
- }
-
- t.setParent(e)
-
- i := ex.Index()
- e.Child = append(e.Child, nil)
- copy(e.Child[i+1:], e.Child[i:])
- e.Child[i] = t
-
- for j := i; j < len(e.Child); j++ {
- e.Child[j].setIndex(j)
- }
-}
-
-// InsertChildAt inserts the token t into the element e's list of child tokens
-// just before the requested index. If the index is greater than or equal to
-// the length of the list of child tokens, the token t is added to the end of
-// the list.
-func (e *Element) InsertChildAt(index int, t Token) {
- if index >= len(e.Child) {
- e.AddChild(t)
- return
- }
-
- if t.Parent() != nil {
- if t.Parent() == e && t.Index() > index {
- index--
- }
- t.Parent().RemoveChild(t)
- }
-
- t.setParent(e)
-
- e.Child = append(e.Child, nil)
- copy(e.Child[index+1:], e.Child[index:])
- e.Child[index] = t
-
- for j := index; j < len(e.Child); j++ {
- e.Child[j].setIndex(j)
- }
-}
-
-// RemoveChild attempts to remove the token t from element e's list of
-// children. If the token t is a child of e, then it is returned. Otherwise,
-// nil is returned.
-func (e *Element) RemoveChild(t Token) Token {
- if t.Parent() != e {
- return nil
- }
- return e.RemoveChildAt(t.Index())
-}
-
-// RemoveChildAt removes the index-th child token from the element e. The
-// removed child token is returned. If the index is out of bounds, no child is
-// removed and nil is returned.
-func (e *Element) RemoveChildAt(index int) Token {
- if index >= len(e.Child) {
- return nil
- }
-
- t := e.Child[index]
- for j := index + 1; j < len(e.Child); j++ {
- e.Child[j].setIndex(j - 1)
- }
- e.Child = append(e.Child[:index], e.Child[index+1:]...)
- t.setIndex(-1)
- t.setParent(nil)
- return t
-}
-
-// ReadFrom reads XML from the reader r and stores the result as a new child
-// of element e.
-func (e *Element) readFrom(ri io.Reader, settings ReadSettings) (n int64, err error) {
- r := newCountReader(ri)
- dec := xml.NewDecoder(r)
- dec.CharsetReader = settings.CharsetReader
- dec.Strict = !settings.Permissive
- dec.Entity = settings.Entity
- var stack stack
- stack.push(e)
- for {
- t, err := dec.RawToken()
- switch {
- case err == io.EOF:
- return r.bytes, nil
- case err != nil:
- return r.bytes, err
- case stack.empty():
- return r.bytes, ErrXML
- }
-
- top := stack.peek().(*Element)
-
- switch t := t.(type) {
- case xml.StartElement:
- e := newElement(t.Name.Space, t.Name.Local, top)
- for _, a := range t.Attr {
- e.createAttr(a.Name.Space, a.Name.Local, a.Value, e)
- }
- stack.push(e)
- case xml.EndElement:
- stack.pop()
- case xml.CharData:
- data := string(t)
- var flags charDataFlags
- if isWhitespace(data) {
- flags = whitespaceFlag
- }
- newCharData(data, flags, top)
- case xml.Comment:
- newComment(string(t), top)
- case xml.Directive:
- newDirective(string(t), top)
- case xml.ProcInst:
- newProcInst(t.Target, string(t.Inst), top)
- }
- }
-}
-
-// SelectAttr finds an element attribute matching the requested key and
-// returns it if found. Returns nil if no matching attribute is found. The key
-// may be prefixed by a namespace prefix and a colon.
-func (e *Element) SelectAttr(key string) *Attr {
- space, skey := spaceDecompose(key)
- for i, a := range e.Attr {
- if spaceMatch(space, a.Space) && skey == a.Key {
- return &e.Attr[i]
- }
- }
- return nil
-}
-
-// SelectAttrValue finds an element attribute matching the requested key and
-// returns its value if found. The key may be prefixed by a namespace prefix
-// and a colon. If the key is not found, the dflt value is returned instead.
-func (e *Element) SelectAttrValue(key, dflt string) string {
- space, skey := spaceDecompose(key)
- for _, a := range e.Attr {
- if spaceMatch(space, a.Space) && skey == a.Key {
- return a.Value
- }
- }
- return dflt
-}
-
-// ChildElements returns all elements that are children of element e.
-func (e *Element) ChildElements() []*Element {
- var elements []*Element
- for _, t := range e.Child {
- if c, ok := t.(*Element); ok {
- elements = append(elements, c)
- }
- }
- return elements
-}
-
-// SelectElement returns the first child element with the given tag. The tag
-// may be prefixed by a namespace prefix and a colon. Returns nil if no
-// element with a matching tag was found.
-func (e *Element) SelectElement(tag string) *Element {
- space, stag := spaceDecompose(tag)
- for _, t := range e.Child {
- if c, ok := t.(*Element); ok && spaceMatch(space, c.Space) && stag == c.Tag {
- return c
- }
- }
- return nil
-}
-
-// SelectElements returns a slice of all child elements with the given tag.
-// The tag may be prefixed by a namespace prefix and a colon.
-func (e *Element) SelectElements(tag string) []*Element {
- space, stag := spaceDecompose(tag)
- var elements []*Element
- for _, t := range e.Child {
- if c, ok := t.(*Element); ok && spaceMatch(space, c.Space) && stag == c.Tag {
- elements = append(elements, c)
- }
- }
- return elements
-}
-
-// FindElement returns the first element matched by the XPath-like path
-// string. Returns nil if no element is found using the path. Panics if an
-// invalid path string is supplied.
-func (e *Element) FindElement(path string) *Element {
- return e.FindElementPath(MustCompilePath(path))
-}
-
-// FindElementPath returns the first element matched by the XPath-like path
-// string. Returns nil if no element is found using the path.
-func (e *Element) FindElementPath(path Path) *Element {
- p := newPather()
- elements := p.traverse(e, path)
- switch {
- case len(elements) > 0:
- return elements[0]
- default:
- return nil
- }
-}
-
-// FindElements returns a slice of elements matched by the XPath-like path
-// string. Panics if an invalid path string is supplied.
-func (e *Element) FindElements(path string) []*Element {
- return e.FindElementsPath(MustCompilePath(path))
-}
-
-// FindElementsPath returns a slice of elements matched by the Path object.
-func (e *Element) FindElementsPath(path Path) []*Element {
- p := newPather()
- return p.traverse(e, path)
-}
-
-// GetPath returns the absolute path of the element.
-func (e *Element) GetPath() string {
- path := []string{}
- for seg := e; seg != nil; seg = seg.Parent() {
- if seg.Tag != "" {
- path = append(path, seg.Tag)
- }
- }
-
- // Reverse the path.
- for i, j := 0, len(path)-1; i < j; i, j = i+1, j-1 {
- path[i], path[j] = path[j], path[i]
- }
-
- return "/" + strings.Join(path, "/")
-}
-
-// GetRelativePath returns the path of the element relative to the source
-// element. If the two elements are not part of the same element tree, then
-// GetRelativePath returns the empty string.
-func (e *Element) GetRelativePath(source *Element) string {
- var path []*Element
-
- if source == nil {
- return ""
- }
-
- // Build a reverse path from the element toward the root. Stop if the
- // source element is encountered.
- var seg *Element
- for seg = e; seg != nil && seg != source; seg = seg.Parent() {
- path = append(path, seg)
- }
-
- // If we found the source element, reverse the path and compose the
- // string.
- if seg == source {
- if len(path) == 0 {
- return "."
- }
- parts := []string{}
- for i := len(path) - 1; i >= 0; i-- {
- parts = append(parts, path[i].Tag)
- }
- return "./" + strings.Join(parts, "/")
- }
-
- // The source wasn't encountered, so climb from the source element toward
- // the root of the tree until an element in the reversed path is
- // encountered.
-
- findPathIndex := func(e *Element, path []*Element) int {
- for i, ee := range path {
- if e == ee {
- return i
- }
- }
- return -1
- }
-
- climb := 0
- for seg = source; seg != nil; seg = seg.Parent() {
- i := findPathIndex(seg, path)
- if i >= 0 {
- path = path[:i] // truncate at found segment
- break
- }
- climb++
- }
-
- // No element in the reversed path was encountered, so the two elements
- // must not be part of the same tree.
- if seg == nil {
- return ""
- }
-
- // Reverse the (possibly truncated) path and prepend ".." segments to
- // climb.
- parts := []string{}
- for i := 0; i < climb; i++ {
- parts = append(parts, "..")
- }
- for i := len(path) - 1; i >= 0; i-- {
- parts = append(parts, path[i].Tag)
- }
- return strings.Join(parts, "/")
-}
-
-// indent recursively inserts proper indentation between an
-// XML element's child tokens.
-func (e *Element) indent(depth int, indent indentFunc) {
- e.stripIndent()
- n := len(e.Child)
- if n == 0 {
- return
- }
-
- oldChild := e.Child
- e.Child = make([]Token, 0, n*2+1)
- isCharData, firstNonCharData := false, true
- for _, c := range oldChild {
- // Insert NL+indent before child if it's not character data.
- // Exceptions: when it's the first non-character-data child, or when
- // the child is at root depth.
- _, isCharData = c.(*CharData)
- if !isCharData {
- if !firstNonCharData || depth > 0 {
- s := indent(depth)
- if s != "" {
- newCharData(s, whitespaceFlag, e)
- }
- }
- firstNonCharData = false
- }
-
- e.addChild(c)
-
- // Recursively process child elements.
- if ce, ok := c.(*Element); ok {
- ce.indent(depth+1, indent)
- }
- }
-
- // Insert NL+indent before the last child.
- if !isCharData {
- if !firstNonCharData || depth > 0 {
- s := indent(depth - 1)
- if s != "" {
- newCharData(s, whitespaceFlag, e)
- }
- }
- }
-}
-
-// stripIndent removes any previously inserted indentation.
-func (e *Element) stripIndent() {
- // Count the number of non-indent child tokens
- n := len(e.Child)
- for _, c := range e.Child {
- if cd, ok := c.(*CharData); ok && cd.IsWhitespace() {
- n--
- }
- }
- if n == len(e.Child) {
- return
- }
-
- // Strip out indent CharData
- newChild := make([]Token, n)
- j := 0
- for _, c := range e.Child {
- if cd, ok := c.(*CharData); ok && cd.IsWhitespace() {
- continue
- }
- newChild[j] = c
- newChild[j].setIndex(j)
- j++
- }
- e.Child = newChild
-}
-
-// dup duplicates the element.
-func (e *Element) dup(parent *Element) Token {
- ne := &Element{
- Space: e.Space,
- Tag: e.Tag,
- Attr: make([]Attr, len(e.Attr)),
- Child: make([]Token, len(e.Child)),
- parent: parent,
- index: e.index,
- }
- for i, t := range e.Child {
- ne.Child[i] = t.dup(ne)
- }
- for i, a := range e.Attr {
- ne.Attr[i] = a
- }
- return ne
-}
-
-// Parent returns the element token's parent element, or nil if it has no
-// parent.
-func (e *Element) Parent() *Element {
- return e.parent
-}
-
-// Index returns the index of this element within its parent element's
-// list of child tokens. If this element has no parent element, the index
-// is -1.
-func (e *Element) Index() int {
- return e.index
-}
-
-// setParent replaces the element token's parent.
-func (e *Element) setParent(parent *Element) {
- e.parent = parent
-}
-
-// setIndex sets the element token's index within its parent's Child slice.
-func (e *Element) setIndex(index int) {
- e.index = index
-}
-
-// writeTo serializes the element to the writer w.
-func (e *Element) writeTo(w *bufio.Writer, s *WriteSettings) {
- w.WriteByte('<')
- w.WriteString(e.FullTag())
- for _, a := range e.Attr {
- w.WriteByte(' ')
- a.writeTo(w, s)
- }
- if len(e.Child) > 0 {
- w.WriteString(">")
- for _, c := range e.Child {
- c.writeTo(w, s)
- }
- w.Write([]byte{'<', '/'})
- w.WriteString(e.FullTag())
- w.WriteByte('>')
- } else {
- if s.CanonicalEndTags {
- w.Write([]byte{'>', '<', '/'})
- w.WriteString(e.FullTag())
- w.WriteByte('>')
- } else {
- w.Write([]byte{'/', '>'})
- }
- }
-}
-
-// addChild adds a child token to the element e.
-func (e *Element) addChild(t Token) {
- t.setIndex(len(e.Child))
- e.Child = append(e.Child, t)
-}
-
-// CreateAttr creates an attribute and adds it to element e. The key may be
-// prefixed by a namespace prefix and a colon. If an attribute with the key
-// already exists, its value is replaced.
-func (e *Element) CreateAttr(key, value string) *Attr {
- space, skey := spaceDecompose(key)
- return e.createAttr(space, skey, value, e)
-}
-
-// createAttr is a helper function that creates attributes.
-func (e *Element) createAttr(space, key, value string, parent *Element) *Attr {
- for i, a := range e.Attr {
- if space == a.Space && key == a.Key {
- e.Attr[i].Value = value
- return &e.Attr[i]
- }
- }
- a := Attr{
- Space: space,
- Key: key,
- Value: value,
- element: parent,
- }
- e.Attr = append(e.Attr, a)
- return &e.Attr[len(e.Attr)-1]
-}
-
-// RemoveAttr removes and returns a copy of the first attribute of the element
-// whose key matches the given key. The key may be prefixed by a namespace
-// prefix and a colon. If a matching attribute does not exist, nil is
-// returned.
-func (e *Element) RemoveAttr(key string) *Attr {
- space, skey := spaceDecompose(key)
- for i, a := range e.Attr {
- if space == a.Space && skey == a.Key {
- e.Attr = append(e.Attr[0:i], e.Attr[i+1:]...)
- return &Attr{
- Space: a.Space,
- Key: a.Key,
- Value: a.Value,
- element: nil,
- }
- }
- }
- return nil
-}
-
-// SortAttrs sorts the element's attributes lexicographically by key.
-func (e *Element) SortAttrs() {
- sort.Sort(byAttr(e.Attr))
-}
-
-type byAttr []Attr
-
-func (a byAttr) Len() int {
- return len(a)
-}
-
-func (a byAttr) Swap(i, j int) {
- a[i], a[j] = a[j], a[i]
-}
-
-func (a byAttr) Less(i, j int) bool {
- sp := strings.Compare(a[i].Space, a[j].Space)
- if sp == 0 {
- return strings.Compare(a[i].Key, a[j].Key) < 0
- }
- return sp < 0
-}
-
-// FullKey returns the attribute a's complete key, including namespace prefix
-// if present.
-func (a *Attr) FullKey() string {
- if a.Space == "" {
- return a.Key
- }
- return a.Space + ":" + a.Key
-}
-
-// Element returns the element containing the attribute.
-func (a *Attr) Element() *Element {
- return a.element
-}
-
-// NamespaceURI returns the XML namespace URI associated with the attribute.
-// If the element is part of the XML default namespace, NamespaceURI returns
-// the empty string.
-func (a *Attr) NamespaceURI() string {
- return a.element.NamespaceURI()
-}
-
-// writeTo serializes the attribute to the writer.
-func (a *Attr) writeTo(w *bufio.Writer, s *WriteSettings) {
- w.WriteString(a.FullKey())
- w.WriteString(`="`)
- var m escapeMode
- if s.CanonicalAttrVal {
- m = escapeCanonicalAttr
- } else {
- m = escapeNormal
- }
- escapeString(w, a.Value, m)
- w.WriteByte('"')
-}
-
-// NewText creates a parentless CharData token containing character data.
-func NewText(text string) *CharData {
- return newCharData(text, 0, nil)
-}
-
-// NewCData creates a parentless XML character CDATA section.
-func NewCData(data string) *CharData {
- return newCharData(data, cdataFlag, nil)
-}
-
-// NewCharData creates a parentless CharData token containing character data.
-//
-// Deprecated: NewCharData is deprecated. Instead, use NewText, which does the
-// same thing.
-func NewCharData(data string) *CharData {
- return newCharData(data, 0, nil)
-}
-
-// newCharData creates a character data token and binds it to a parent
-// element. If parent is nil, the CharData token remains unbound.
-func newCharData(data string, flags charDataFlags, parent *Element) *CharData {
- c := &CharData{
- Data: data,
- parent: parent,
- index: -1,
- flags: flags,
- }
- if parent != nil {
- parent.addChild(c)
- }
- return c
-}
-
-// CreateText creates a CharData token containing character data and adds it
-// as a child of element e.
-func (e *Element) CreateText(text string) *CharData {
- return newCharData(text, 0, e)
-}
-
-// CreateCData creates a CharData token containing a CDATA section and adds it
-// as a child of element e.
-func (e *Element) CreateCData(data string) *CharData {
- return newCharData(data, cdataFlag, e)
-}
-
-// CreateCharData creates a CharData token containing character data and adds
-// it as a child of element e.
-//
-// Deprecated: CreateCharData is deprecated. Instead, use CreateText, which
-// does the same thing.
-func (e *Element) CreateCharData(data string) *CharData {
- return newCharData(data, 0, e)
-}
-
-// dup duplicates the character data.
-func (c *CharData) dup(parent *Element) Token {
- return &CharData{
- Data: c.Data,
- flags: c.flags,
- parent: parent,
- index: c.index,
- }
-}
-
-// IsCData returns true if the character data token is to be encoded as a
-// CDATA section.
-func (c *CharData) IsCData() bool {
- return (c.flags & cdataFlag) != 0
-}
-
-// IsWhitespace returns true if the character data token was created by one of
-// the document Indent methods to contain only whitespace.
-func (c *CharData) IsWhitespace() bool {
- return (c.flags & whitespaceFlag) != 0
-}
-
-// Parent returns the character data token's parent element, or nil if it has
-// no parent.
-func (c *CharData) Parent() *Element {
- return c.parent
-}
-
-// Index returns the index of this CharData token within its parent element's
-// list of child tokens. If this CharData token has no parent element, the
-// index is -1.
-func (c *CharData) Index() int {
- return c.index
-}
-
-// setParent replaces the character data token's parent.
-func (c *CharData) setParent(parent *Element) {
- c.parent = parent
-}
-
-// setIndex sets the CharData token's index within its parent element's Child
-// slice.
-func (c *CharData) setIndex(index int) {
- c.index = index
-}
-
-// writeTo serializes character data to the writer.
-func (c *CharData) writeTo(w *bufio.Writer, s *WriteSettings) {
- if c.IsCData() {
- w.WriteString(``)
- } else {
- var m escapeMode
- if s.CanonicalText {
- m = escapeCanonicalText
- } else {
- m = escapeNormal
- }
- escapeString(w, c.Data, m)
- }
-}
-
-// NewComment creates a parentless XML comment.
-func NewComment(comment string) *Comment {
- return newComment(comment, nil)
-}
-
-// NewComment creates an XML comment and binds it to a parent element. If
-// parent is nil, the Comment remains unbound.
-func newComment(comment string, parent *Element) *Comment {
- c := &Comment{
- Data: comment,
- parent: parent,
- index: -1,
- }
- if parent != nil {
- parent.addChild(c)
- }
- return c
-}
-
-// CreateComment creates an XML comment and adds it as a child of element e.
-func (e *Element) CreateComment(comment string) *Comment {
- return newComment(comment, e)
-}
-
-// dup duplicates the comment.
-func (c *Comment) dup(parent *Element) Token {
- return &Comment{
- Data: c.Data,
- parent: parent,
- index: c.index,
- }
-}
-
-// Parent returns comment token's parent element, or nil if it has no parent.
-func (c *Comment) Parent() *Element {
- return c.parent
-}
-
-// Index returns the index of this Comment token within its parent element's
-// list of child tokens. If this Comment token has no parent element, the
-// index is -1.
-func (c *Comment) Index() int {
- return c.index
-}
-
-// setParent replaces the comment token's parent.
-func (c *Comment) setParent(parent *Element) {
- c.parent = parent
-}
-
-// setIndex sets the Comment token's index within its parent element's Child
-// slice.
-func (c *Comment) setIndex(index int) {
- c.index = index
-}
-
-// writeTo serialies the comment to the writer.
-func (c *Comment) writeTo(w *bufio.Writer, s *WriteSettings) {
- w.WriteString("")
-}
-
-// NewDirective creates a parentless XML directive.
-func NewDirective(data string) *Directive {
- return newDirective(data, nil)
-}
-
-// newDirective creates an XML directive and binds it to a parent element. If
-// parent is nil, the Directive remains unbound.
-func newDirective(data string, parent *Element) *Directive {
- d := &Directive{
- Data: data,
- parent: parent,
- index: -1,
- }
- if parent != nil {
- parent.addChild(d)
- }
- return d
-}
-
-// CreateDirective creates an XML directive and adds it as the last child of
-// element e.
-func (e *Element) CreateDirective(data string) *Directive {
- return newDirective(data, e)
-}
-
-// dup duplicates the directive.
-func (d *Directive) dup(parent *Element) Token {
- return &Directive{
- Data: d.Data,
- parent: parent,
- index: d.index,
- }
-}
-
-// Parent returns directive token's parent element, or nil if it has no
-// parent.
-func (d *Directive) Parent() *Element {
- return d.parent
-}
-
-// Index returns the index of this Directive token within its parent element's
-// list of child tokens. If this Directive token has no parent element, the
-// index is -1.
-func (d *Directive) Index() int {
- return d.index
-}
-
-// setParent replaces the directive token's parent.
-func (d *Directive) setParent(parent *Element) {
- d.parent = parent
-}
-
-// setIndex sets the Directive token's index within its parent element's Child
-// slice.
-func (d *Directive) setIndex(index int) {
- d.index = index
-}
-
-// writeTo serializes the XML directive to the writer.
-func (d *Directive) writeTo(w *bufio.Writer, s *WriteSettings) {
- w.WriteString("")
-}
-
-// NewProcInst creates a parentless XML processing instruction.
-func NewProcInst(target, inst string) *ProcInst {
- return newProcInst(target, inst, nil)
-}
-
-// newProcInst creates an XML processing instruction and binds it to a parent
-// element. If parent is nil, the ProcInst remains unbound.
-func newProcInst(target, inst string, parent *Element) *ProcInst {
- p := &ProcInst{
- Target: target,
- Inst: inst,
- parent: parent,
- index: -1,
- }
- if parent != nil {
- parent.addChild(p)
- }
- return p
-}
-
-// CreateProcInst creates a processing instruction and adds it as a child of
-// element e.
-func (e *Element) CreateProcInst(target, inst string) *ProcInst {
- return newProcInst(target, inst, e)
-}
-
-// dup duplicates the procinst.
-func (p *ProcInst) dup(parent *Element) Token {
- return &ProcInst{
- Target: p.Target,
- Inst: p.Inst,
- parent: parent,
- index: p.index,
- }
-}
-
-// Parent returns processing instruction token's parent element, or nil if it
-// has no parent.
-func (p *ProcInst) Parent() *Element {
- return p.parent
-}
-
-// Index returns the index of this ProcInst token within its parent element's
-// list of child tokens. If this ProcInst token has no parent element, the
-// index is -1.
-func (p *ProcInst) Index() int {
- return p.index
-}
-
-// setParent replaces the processing instruction token's parent.
-func (p *ProcInst) setParent(parent *Element) {
- p.parent = parent
-}
-
-// setIndex sets the processing instruction token's index within its parent
-// element's Child slice.
-func (p *ProcInst) setIndex(index int) {
- p.index = index
-}
-
-// writeTo serializes the processing instruction to the writer.
-func (p *ProcInst) writeTo(w *bufio.Writer, s *WriteSettings) {
- w.WriteString("")
- w.WriteString(p.Target)
- if p.Inst != "" {
- w.WriteByte(' ')
- w.WriteString(p.Inst)
- }
- w.WriteString("?>")
-}
diff --git a/vendor/github.com/beevik/etree/helpers.go b/vendor/github.com/beevik/etree/helpers.go
deleted file mode 100644
index 825e14e..0000000
--- a/vendor/github.com/beevik/etree/helpers.go
+++ /dev/null
@@ -1,276 +0,0 @@
-// Copyright 2015-2019 Brett Vickers.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package etree
-
-import (
- "bufio"
- "io"
- "strings"
- "unicode/utf8"
-)
-
-// A simple stack
-type stack struct {
- data []interface{}
-}
-
-func (s *stack) empty() bool {
- return len(s.data) == 0
-}
-
-func (s *stack) push(value interface{}) {
- s.data = append(s.data, value)
-}
-
-func (s *stack) pop() interface{} {
- value := s.data[len(s.data)-1]
- s.data[len(s.data)-1] = nil
- s.data = s.data[:len(s.data)-1]
- return value
-}
-
-func (s *stack) peek() interface{} {
- return s.data[len(s.data)-1]
-}
-
-// A fifo is a simple first-in-first-out queue.
-type fifo struct {
- data []interface{}
- head, tail int
-}
-
-func (f *fifo) add(value interface{}) {
- if f.len()+1 >= len(f.data) {
- f.grow()
- }
- f.data[f.tail] = value
- if f.tail++; f.tail == len(f.data) {
- f.tail = 0
- }
-}
-
-func (f *fifo) remove() interface{} {
- value := f.data[f.head]
- f.data[f.head] = nil
- if f.head++; f.head == len(f.data) {
- f.head = 0
- }
- return value
-}
-
-func (f *fifo) len() int {
- if f.tail >= f.head {
- return f.tail - f.head
- }
- return len(f.data) - f.head + f.tail
-}
-
-func (f *fifo) grow() {
- c := len(f.data) * 2
- if c == 0 {
- c = 4
- }
- buf, count := make([]interface{}, c), f.len()
- if f.tail >= f.head {
- copy(buf[0:count], f.data[f.head:f.tail])
- } else {
- hindex := len(f.data) - f.head
- copy(buf[0:hindex], f.data[f.head:])
- copy(buf[hindex:count], f.data[:f.tail])
- }
- f.data, f.head, f.tail = buf, 0, count
-}
-
-// countReader implements a proxy reader that counts the number of
-// bytes read from its encapsulated reader.
-type countReader struct {
- r io.Reader
- bytes int64
-}
-
-func newCountReader(r io.Reader) *countReader {
- return &countReader{r: r}
-}
-
-func (cr *countReader) Read(p []byte) (n int, err error) {
- b, err := cr.r.Read(p)
- cr.bytes += int64(b)
- return b, err
-}
-
-// countWriter implements a proxy writer that counts the number of
-// bytes written by its encapsulated writer.
-type countWriter struct {
- w io.Writer
- bytes int64
-}
-
-func newCountWriter(w io.Writer) *countWriter {
- return &countWriter{w: w}
-}
-
-func (cw *countWriter) Write(p []byte) (n int, err error) {
- b, err := cw.w.Write(p)
- cw.bytes += int64(b)
- return b, err
-}
-
-// isWhitespace returns true if the byte slice contains only
-// whitespace characters.
-func isWhitespace(s string) bool {
- for i := 0; i < len(s); i++ {
- if c := s[i]; c != ' ' && c != '\t' && c != '\n' && c != '\r' {
- return false
- }
- }
- return true
-}
-
-// spaceMatch returns true if namespace a is the empty string
-// or if namespace a equals namespace b.
-func spaceMatch(a, b string) bool {
- switch {
- case a == "":
- return true
- default:
- return a == b
- }
-}
-
-// spaceDecompose breaks a namespace:tag identifier at the ':'
-// and returns the two parts.
-func spaceDecompose(str string) (space, key string) {
- colon := strings.IndexByte(str, ':')
- if colon == -1 {
- return "", str
- }
- return str[:colon], str[colon+1:]
-}
-
-// Strings used by indentCRLF and indentLF
-const (
- indentSpaces = "\r\n "
- indentTabs = "\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t"
-)
-
-// indentCRLF returns a CRLF newline followed by n copies of the first
-// non-CRLF character in the source string.
-func indentCRLF(n int, source string) string {
- switch {
- case n < 0:
- return source[:2]
- case n < len(source)-1:
- return source[:n+2]
- default:
- return source + strings.Repeat(source[2:3], n-len(source)+2)
- }
-}
-
-// indentLF returns a LF newline followed by n copies of the first non-LF
-// character in the source string.
-func indentLF(n int, source string) string {
- switch {
- case n < 0:
- return source[1:2]
- case n < len(source)-1:
- return source[1 : n+2]
- default:
- return source[1:] + strings.Repeat(source[2:3], n-len(source)+2)
- }
-}
-
-// nextIndex returns the index of the next occurrence of sep in s,
-// starting from offset. It returns -1 if the sep string is not found.
-func nextIndex(s, sep string, offset int) int {
- switch i := strings.Index(s[offset:], sep); i {
- case -1:
- return -1
- default:
- return offset + i
- }
-}
-
-// isInteger returns true if the string s contains an integer.
-func isInteger(s string) bool {
- for i := 0; i < len(s); i++ {
- if (s[i] < '0' || s[i] > '9') && !(i == 0 && s[i] == '-') {
- return false
- }
- }
- return true
-}
-
-type escapeMode byte
-
-const (
- escapeNormal escapeMode = iota
- escapeCanonicalText
- escapeCanonicalAttr
-)
-
-// escapeString writes an escaped version of a string to the writer.
-func escapeString(w *bufio.Writer, s string, m escapeMode) {
- var esc []byte
- last := 0
- for i := 0; i < len(s); {
- r, width := utf8.DecodeRuneInString(s[i:])
- i += width
- switch r {
- case '&':
- esc = []byte("&")
- case '<':
- esc = []byte("<")
- case '>':
- if m == escapeCanonicalAttr {
- continue
- }
- esc = []byte(">")
- case '\'':
- if m != escapeNormal {
- continue
- }
- esc = []byte("'")
- case '"':
- if m == escapeCanonicalText {
- continue
- }
- esc = []byte(""")
- case '\t':
- if m != escapeCanonicalAttr {
- continue
- }
- esc = []byte(" ")
- case '\n':
- if m != escapeCanonicalAttr {
- continue
- }
- esc = []byte("
")
- case '\r':
- if m == escapeNormal {
- continue
- }
- esc = []byte("
")
- default:
- if !isInCharacterRange(r) || (r == 0xFFFD && width == 1) {
- esc = []byte("\uFFFD")
- break
- }
- continue
- }
- w.WriteString(s[last : i-width])
- w.Write(esc)
- last = i
- }
- w.WriteString(s[last:])
-}
-
-func isInCharacterRange(r rune) bool {
- return r == 0x09 ||
- r == 0x0A ||
- r == 0x0D ||
- r >= 0x20 && r <= 0xD7FF ||
- r >= 0xE000 && r <= 0xFFFD ||
- r >= 0x10000 && r <= 0x10FFFF
-}
diff --git a/vendor/github.com/beevik/etree/path.go b/vendor/github.com/beevik/etree/path.go
deleted file mode 100644
index 82db0ac..0000000
--- a/vendor/github.com/beevik/etree/path.go
+++ /dev/null
@@ -1,582 +0,0 @@
-// Copyright 2015-2019 Brett Vickers.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package etree
-
-import (
- "strconv"
- "strings"
-)
-
-/*
-A Path is a string that represents a search path through an etree starting
-from the document root or an arbitrary element. Paths are used with the
-Element object's Find* methods to locate and return desired elements.
-
-A Path consists of a series of slash-separated "selectors", each of which may
-be modified by one or more bracket-enclosed "filters". Selectors are used to
-traverse the etree from element to element, while filters are used to narrow
-the list of candidate elements at each node.
-
-Although etree Path strings are similar to XPath strings
-(https://www.w3.org/TR/1999/REC-xpath-19991116/), they have a more limited set
-of selectors and filtering options.
-
-The following selectors are supported by etree Path strings:
-
- . Select the current element.
- .. Select the parent of the current element.
- * Select all child elements of the current element.
- / Select the root element when used at the start of a path.
- // Select all descendants of the current element.
- tag Select all child elements with a name matching the tag.
-
-The following basic filters are supported by etree Path strings:
-
- [@attrib] Keep elements with an attribute named attrib.
- [@attrib='val'] Keep elements with an attribute named attrib and value matching val.
- [tag] Keep elements with a child element named tag.
- [tag='val'] Keep elements with a child element named tag and text matching val.
- [n] Keep the n-th element, where n is a numeric index starting from 1.
-
-The following function filters are also supported:
-
- [text()] Keep elements with non-empty text.
- [text()='val'] Keep elements whose text matches val.
- [local-name()='val'] Keep elements whose un-prefixed tag matches val.
- [name()='val'] Keep elements whose full tag exactly matches val.
- [namespace-prefix()='val'] Keep elements whose namespace prefix matches val.
- [namespace-uri()='val'] Keep elements whose namespace URI matches val.
-
-Here are some examples of Path strings:
-
-- Select the bookstore child element of the root element:
- /bookstore
-
-- Beginning from the root element, select the title elements of all
-descendant book elements having a 'category' attribute of 'WEB':
- //book[@category='WEB']/title
-
-- Beginning from the current element, select the first descendant
-book element with a title child element containing the text 'Great
-Expectations':
- .//book[title='Great Expectations'][1]
-
-- Beginning from the current element, select all child elements of
-book elements with an attribute 'language' set to 'english':
- ./book/*[@language='english']
-
-- Beginning from the current element, select all child elements of
-book elements containing the text 'special':
- ./book/*[text()='special']
-
-- Beginning from the current element, select all descendant book
-elements whose title child element has a 'language' attribute of 'french':
- .//book/title[@language='french']/..
-
-- Beginning from the current element, select all book elements
-belonging to the http://www.w3.org/TR/html4/ namespace:
- .//book[namespace-uri()='http://www.w3.org/TR/html4/']
-
-*/
-type Path struct {
- segments []segment
-}
-
-// ErrPath is returned by path functions when an invalid etree path is provided.
-type ErrPath string
-
-// Error returns the string describing a path error.
-func (err ErrPath) Error() string {
- return "etree: " + string(err)
-}
-
-// CompilePath creates an optimized version of an XPath-like string that
-// can be used to query elements in an element tree.
-func CompilePath(path string) (Path, error) {
- var comp compiler
- segments := comp.parsePath(path)
- if comp.err != ErrPath("") {
- return Path{nil}, comp.err
- }
- return Path{segments}, nil
-}
-
-// MustCompilePath creates an optimized version of an XPath-like string that
-// can be used to query elements in an element tree. Panics if an error
-// occurs. Use this function to create Paths when you know the path is
-// valid (i.e., if it's hard-coded).
-func MustCompilePath(path string) Path {
- p, err := CompilePath(path)
- if err != nil {
- panic(err)
- }
- return p
-}
-
-// A segment is a portion of a path between "/" characters.
-// It contains one selector and zero or more [filters].
-type segment struct {
- sel selector
- filters []filter
-}
-
-func (seg *segment) apply(e *Element, p *pather) {
- seg.sel.apply(e, p)
- for _, f := range seg.filters {
- f.apply(p)
- }
-}
-
-// A selector selects XML elements for consideration by the
-// path traversal.
-type selector interface {
- apply(e *Element, p *pather)
-}
-
-// A filter pares down a list of candidate XML elements based
-// on a path filter in [brackets].
-type filter interface {
- apply(p *pather)
-}
-
-// A pather is helper object that traverses an element tree using
-// a Path object. It collects and deduplicates all elements matching
-// the path query.
-type pather struct {
- queue fifo
- results []*Element
- inResults map[*Element]bool
- candidates []*Element
- scratch []*Element // used by filters
-}
-
-// A node represents an element and the remaining path segments that
-// should be applied against it by the pather.
-type node struct {
- e *Element
- segments []segment
-}
-
-func newPather() *pather {
- return &pather{
- results: make([]*Element, 0),
- inResults: make(map[*Element]bool),
- candidates: make([]*Element, 0),
- scratch: make([]*Element, 0),
- }
-}
-
-// traverse follows the path from the element e, collecting
-// and then returning all elements that match the path's selectors
-// and filters.
-func (p *pather) traverse(e *Element, path Path) []*Element {
- for p.queue.add(node{e, path.segments}); p.queue.len() > 0; {
- p.eval(p.queue.remove().(node))
- }
- return p.results
-}
-
-// eval evalutes the current path node by applying the remaining
-// path's selector rules against the node's element.
-func (p *pather) eval(n node) {
- p.candidates = p.candidates[0:0]
- seg, remain := n.segments[0], n.segments[1:]
- seg.apply(n.e, p)
-
- if len(remain) == 0 {
- for _, c := range p.candidates {
- if in := p.inResults[c]; !in {
- p.inResults[c] = true
- p.results = append(p.results, c)
- }
- }
- } else {
- for _, c := range p.candidates {
- p.queue.add(node{c, remain})
- }
- }
-}
-
-// A compiler generates a compiled path from a path string.
-type compiler struct {
- err ErrPath
-}
-
-// parsePath parses an XPath-like string describing a path
-// through an element tree and returns a slice of segment
-// descriptors.
-func (c *compiler) parsePath(path string) []segment {
- // If path ends with //, fix it
- if strings.HasSuffix(path, "//") {
- path = path + "*"
- }
-
- var segments []segment
-
- // Check for an absolute path
- if strings.HasPrefix(path, "/") {
- segments = append(segments, segment{new(selectRoot), []filter{}})
- path = path[1:]
- }
-
- // Split path into segments
- for _, s := range splitPath(path) {
- segments = append(segments, c.parseSegment(s))
- if c.err != ErrPath("") {
- break
- }
- }
- return segments
-}
-
-func splitPath(path string) []string {
- pieces := make([]string, 0)
- start := 0
- inquote := false
- for i := 0; i+1 <= len(path); i++ {
- if path[i] == '\'' {
- inquote = !inquote
- } else if path[i] == '/' && !inquote {
- pieces = append(pieces, path[start:i])
- start = i + 1
- }
- }
- return append(pieces, path[start:])
-}
-
-// parseSegment parses a path segment between / characters.
-func (c *compiler) parseSegment(path string) segment {
- pieces := strings.Split(path, "[")
- seg := segment{
- sel: c.parseSelector(pieces[0]),
- filters: []filter{},
- }
- for i := 1; i < len(pieces); i++ {
- fpath := pieces[i]
- if fpath[len(fpath)-1] != ']' {
- c.err = ErrPath("path has invalid filter [brackets].")
- break
- }
- seg.filters = append(seg.filters, c.parseFilter(fpath[:len(fpath)-1]))
- }
- return seg
-}
-
-// parseSelector parses a selector at the start of a path segment.
-func (c *compiler) parseSelector(path string) selector {
- switch path {
- case ".":
- return new(selectSelf)
- case "..":
- return new(selectParent)
- case "*":
- return new(selectChildren)
- case "":
- return new(selectDescendants)
- default:
- return newSelectChildrenByTag(path)
- }
-}
-
-var fnTable = map[string]struct {
- hasFn func(e *Element) bool
- getValFn func(e *Element) string
-}{
- "local-name": {nil, (*Element).name},
- "name": {nil, (*Element).FullTag},
- "namespace-prefix": {nil, (*Element).namespacePrefix},
- "namespace-uri": {nil, (*Element).NamespaceURI},
- "text": {(*Element).hasText, (*Element).Text},
-}
-
-// parseFilter parses a path filter contained within [brackets].
-func (c *compiler) parseFilter(path string) filter {
- if len(path) == 0 {
- c.err = ErrPath("path contains an empty filter expression.")
- return nil
- }
-
- // Filter contains [@attr='val'], [fn()='val'], or [tag='val']?
- eqindex := strings.Index(path, "='")
- if eqindex >= 0 {
- rindex := nextIndex(path, "'", eqindex+2)
- if rindex != len(path)-1 {
- c.err = ErrPath("path has mismatched filter quotes.")
- return nil
- }
-
- key := path[:eqindex]
- value := path[eqindex+2 : rindex]
-
- switch {
- case key[0] == '@':
- return newFilterAttrVal(key[1:], value)
- case strings.HasSuffix(key, "()"):
- fn := key[:len(key)-2]
- if t, ok := fnTable[fn]; ok && t.getValFn != nil {
- return newFilterFuncVal(t.getValFn, value)
- }
- c.err = ErrPath("path has unknown function " + fn)
- return nil
- default:
- return newFilterChildText(key, value)
- }
- }
-
- // Filter contains [@attr], [N], [tag] or [fn()]
- switch {
- case path[0] == '@':
- return newFilterAttr(path[1:])
- case strings.HasSuffix(path, "()"):
- fn := path[:len(path)-2]
- if t, ok := fnTable[fn]; ok && t.hasFn != nil {
- return newFilterFunc(t.hasFn)
- }
- c.err = ErrPath("path has unknown function " + fn)
- return nil
- case isInteger(path):
- pos, _ := strconv.Atoi(path)
- switch {
- case pos > 0:
- return newFilterPos(pos - 1)
- default:
- return newFilterPos(pos)
- }
- default:
- return newFilterChild(path)
- }
-}
-
-// selectSelf selects the current element into the candidate list.
-type selectSelf struct{}
-
-func (s *selectSelf) apply(e *Element, p *pather) {
- p.candidates = append(p.candidates, e)
-}
-
-// selectRoot selects the element's root node.
-type selectRoot struct{}
-
-func (s *selectRoot) apply(e *Element, p *pather) {
- root := e
- for root.parent != nil {
- root = root.parent
- }
- p.candidates = append(p.candidates, root)
-}
-
-// selectParent selects the element's parent into the candidate list.
-type selectParent struct{}
-
-func (s *selectParent) apply(e *Element, p *pather) {
- if e.parent != nil {
- p.candidates = append(p.candidates, e.parent)
- }
-}
-
-// selectChildren selects the element's child elements into the
-// candidate list.
-type selectChildren struct{}
-
-func (s *selectChildren) apply(e *Element, p *pather) {
- for _, c := range e.Child {
- if c, ok := c.(*Element); ok {
- p.candidates = append(p.candidates, c)
- }
- }
-}
-
-// selectDescendants selects all descendant child elements
-// of the element into the candidate list.
-type selectDescendants struct{}
-
-func (s *selectDescendants) apply(e *Element, p *pather) {
- var queue fifo
- for queue.add(e); queue.len() > 0; {
- e := queue.remove().(*Element)
- p.candidates = append(p.candidates, e)
- for _, c := range e.Child {
- if c, ok := c.(*Element); ok {
- queue.add(c)
- }
- }
- }
-}
-
-// selectChildrenByTag selects into the candidate list all child
-// elements of the element having the specified tag.
-type selectChildrenByTag struct {
- space, tag string
-}
-
-func newSelectChildrenByTag(path string) *selectChildrenByTag {
- s, l := spaceDecompose(path)
- return &selectChildrenByTag{s, l}
-}
-
-func (s *selectChildrenByTag) apply(e *Element, p *pather) {
- for _, c := range e.Child {
- if c, ok := c.(*Element); ok && spaceMatch(s.space, c.Space) && s.tag == c.Tag {
- p.candidates = append(p.candidates, c)
- }
- }
-}
-
-// filterPos filters the candidate list, keeping only the
-// candidate at the specified index.
-type filterPos struct {
- index int
-}
-
-func newFilterPos(pos int) *filterPos {
- return &filterPos{pos}
-}
-
-func (f *filterPos) apply(p *pather) {
- if f.index >= 0 {
- if f.index < len(p.candidates) {
- p.scratch = append(p.scratch, p.candidates[f.index])
- }
- } else {
- if -f.index <= len(p.candidates) {
- p.scratch = append(p.scratch, p.candidates[len(p.candidates)+f.index])
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterAttr filters the candidate list for elements having
-// the specified attribute.
-type filterAttr struct {
- space, key string
-}
-
-func newFilterAttr(str string) *filterAttr {
- s, l := spaceDecompose(str)
- return &filterAttr{s, l}
-}
-
-func (f *filterAttr) apply(p *pather) {
- for _, c := range p.candidates {
- for _, a := range c.Attr {
- if spaceMatch(f.space, a.Space) && f.key == a.Key {
- p.scratch = append(p.scratch, c)
- break
- }
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterAttrVal filters the candidate list for elements having
-// the specified attribute with the specified value.
-type filterAttrVal struct {
- space, key, val string
-}
-
-func newFilterAttrVal(str, value string) *filterAttrVal {
- s, l := spaceDecompose(str)
- return &filterAttrVal{s, l, value}
-}
-
-func (f *filterAttrVal) apply(p *pather) {
- for _, c := range p.candidates {
- for _, a := range c.Attr {
- if spaceMatch(f.space, a.Space) && f.key == a.Key && f.val == a.Value {
- p.scratch = append(p.scratch, c)
- break
- }
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterFunc filters the candidate list for elements satisfying a custom
-// boolean function.
-type filterFunc struct {
- fn func(e *Element) bool
-}
-
-func newFilterFunc(fn func(e *Element) bool) *filterFunc {
- return &filterFunc{fn}
-}
-
-func (f *filterFunc) apply(p *pather) {
- for _, c := range p.candidates {
- if f.fn(c) {
- p.scratch = append(p.scratch, c)
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterFuncVal filters the candidate list for elements containing a value
-// matching the result of a custom function.
-type filterFuncVal struct {
- fn func(e *Element) string
- val string
-}
-
-func newFilterFuncVal(fn func(e *Element) string, value string) *filterFuncVal {
- return &filterFuncVal{fn, value}
-}
-
-func (f *filterFuncVal) apply(p *pather) {
- for _, c := range p.candidates {
- if f.fn(c) == f.val {
- p.scratch = append(p.scratch, c)
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterChild filters the candidate list for elements having
-// a child element with the specified tag.
-type filterChild struct {
- space, tag string
-}
-
-func newFilterChild(str string) *filterChild {
- s, l := spaceDecompose(str)
- return &filterChild{s, l}
-}
-
-func (f *filterChild) apply(p *pather) {
- for _, c := range p.candidates {
- for _, cc := range c.Child {
- if cc, ok := cc.(*Element); ok &&
- spaceMatch(f.space, cc.Space) &&
- f.tag == cc.Tag {
- p.scratch = append(p.scratch, c)
- }
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
-
-// filterChildText filters the candidate list for elements having
-// a child element with the specified tag and text.
-type filterChildText struct {
- space, tag, text string
-}
-
-func newFilterChildText(str, text string) *filterChildText {
- s, l := spaceDecompose(str)
- return &filterChildText{s, l, text}
-}
-
-func (f *filterChildText) apply(p *pather) {
- for _, c := range p.candidates {
- for _, cc := range c.Child {
- if cc, ok := cc.(*Element); ok &&
- spaceMatch(f.space, cc.Space) &&
- f.tag == cc.Tag &&
- f.text == cc.Text() {
- p.scratch = append(p.scratch, c)
- }
- }
- }
- p.candidates, p.scratch = p.scratch, p.candidates[0:0]
-}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore b/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
new file mode 100644
index 0000000..fb22c99
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
@@ -0,0 +1,4 @@
+.vscode
+.idea
+*.swp
+jv
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE b/vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
new file mode 100644
index 0000000..19dc35b
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
@@ -0,0 +1,175 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
\ No newline at end of file
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md b/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
new file mode 100644
index 0000000..8c66ba0
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
@@ -0,0 +1,215 @@
+# jsonschema v5.1.1
+
+[![License](https://img.shields.io/badge/license-Apache--2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
+[![GoDoc](https://pkg.go.dev/badge/github.com/santhosh-tekuri/jsonschema/v5.svg)](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
+[![Go Report Card](https://goreportcard.com/badge/github.com/santhosh-tekuri/jsonschema/v5)](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v5)
+[![Build Status](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml/badge.svg?branch=master)](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
+[![codecov](https://codecov.io/gh/santhosh-tekuri/jsonschema/branch/master/graph/badge.svg)](https://codecov.io/github/santhosh-tekuri/jsonschema?branch=master)
+
+Package jsonschema provides json-schema compilation and validation.
+
+[Benchmarks](https://dev.to/vearutop/benchmarking-correctness-and-performance-of-go-json-schema-validators-3247)
+
+### Features:
+ - implements
+ [draft 2020-12](https://json-schema.org/specification-links.html#2020-12),
+ [draft 2019-09](https://json-schema.org/specification-links.html#draft-2019-09-formerly-known-as-draft-8),
+ [draft-7](https://json-schema.org/specification-links.html#draft-7),
+ [draft-6](https://json-schema.org/specification-links.html#draft-6),
+ [draft-4](https://json-schema.org/specification-links.html#draft-4)
+ - fully compliant with [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite), (excluding some optional)
+ - list of optional tests that are excluded can be found in schema_test.go(variable [skipTests](https://github.com/santhosh-tekuri/jsonschema/blob/master/schema_test.go#L24))
+ - validates schemas against meta-schema
+ - full support of remote references
+ - support of recursive references between schemas
+ - detects infinite loop in schemas
+ - thread safe validation
+ - rich, intuitive hierarchial error messages with json-pointers to exact location
+ - supports output formats flag, basic and detailed
+ - supports enabling format and content Assertions in draft2019-09 or above
+ - change `Compiler.AssertFormat`, `Compiler.AssertContent` to `true`
+ - compiled schema can be introspected. easier to develop tools like generating go structs given schema
+ - supports user-defined keywords via [extensions](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-Extension)
+ - implements following formats (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedFormat))
+ - date-time, date, time, duration, period (supports leap-second)
+ - uuid, hostname, email
+ - ip-address, ipv4, ipv6
+ - uri, uriref, uri-template(limited validation)
+ - json-pointer, relative-json-pointer
+ - regex, format
+ - implements following contentEncoding (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
+ - base64
+ - implements following contentMediaType (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
+ - application/json
+ - can load from files/http/https/[string](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-FromString)/[]byte/io.Reader (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedLoader))
+
+
+see examples in [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
+
+The schema is compiled against the version specified in `$schema` property.
+If "$schema" property is missing, it uses latest draft which currently implemented
+by this library.
+
+You can force to use specific version, when `$schema` is missing, as follows:
+
+```go
+compiler := jsonschema.NewCompiler()
+compiler.Draft = jsonschema.Draft4
+```
+
+This package supports loading json-schema from filePath and fileURL.
+
+To load json-schema from HTTPURL, add following import:
+
+```go
+import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+```
+
+## Rich Errors
+
+The ValidationError returned by Validate method contains detailed context to understand why and where the error is.
+
+schema.json:
+```json
+{
+ "$ref": "t.json#/definitions/employee"
+}
+```
+
+t.json:
+```json
+{
+ "definitions": {
+ "employee": {
+ "type": "string"
+ }
+ }
+}
+```
+
+doc.json:
+```json
+1
+```
+
+assuming `err` is the ValidationError returned when `doc.json` validated with `schema.json`,
+```go
+fmt.Printf("%#v\n", err) // using %#v prints errors hierarchy
+```
+Prints:
+```
+[I#] [S#] doesn't validate with file:///Users/santhosh/jsonschema/schema.json#
+ [I#] [S#/$ref] doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'
+ [I#] [S#/definitions/employee/type] expected string, but got number
+```
+
+Here `I` stands for instance document and `S` stands for schema document.
+The json-fragments that caused error in instance and schema documents are represented using json-pointer notation.
+Nested causes are printed with indent.
+
+To output `err` in `flag` output format:
+```go
+b, _ := json.MarshalIndent(err.FlagOutput(), "", " ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+ "valid": false
+}
+```
+To output `err` in `basic` output format:
+```go
+b, _ := json.MarshalIndent(err.BasicOutput(), "", " ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+ "valid": false,
+ "errors": [
+ {
+ "keywordLocation": "",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
+ "instanceLocation": "",
+ "error": "doesn't validate with file:///Users/santhosh/jsonschema/schema.json#"
+ },
+ {
+ "keywordLocation": "/$ref",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
+ "instanceLocation": "",
+ "error": "doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'"
+ },
+ {
+ "keywordLocation": "/$ref/type",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
+ "instanceLocation": "",
+ "error": "expected string, but got number"
+ }
+ ]
+}
+```
+To output `err` in `detailed` output format:
+```go
+b, _ := json.MarshalIndent(err.DetailedOutput(), "", " ")
+fmt.Println(string(b))
+```
+Prints:
+```json
+{
+ "valid": false,
+ "keywordLocation": "",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
+ "instanceLocation": "",
+ "errors": [
+ {
+ "valid": false,
+ "keywordLocation": "/$ref",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
+ "instanceLocation": "",
+ "errors": [
+ {
+ "valid": false,
+ "keywordLocation": "/$ref/type",
+ "absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
+ "instanceLocation": "",
+ "error": "expected string, but got number"
+ }
+ ]
+ }
+ ]
+}
+```
+
+## CLI
+
+to install `go install github.com/santhosh-tekuri/jsonschema/v5/cmd/jv@latest`
+
+```bash
+jv [-draft INT] [-output FORMAT] [-assertformat] [-assertcontent] <json-schema> [<json-or-yaml-document>]...
+ -assertcontent
+ enable content assertions with draft >= 2019
+ -assertformat
+ enable format assertions with draft >= 2019
+ -draft int
+ draft used when '$schema' attribute is missing. valid values 4, 5, 7, 2019, 2020 (default 2020)
+ -output string
+ output format. valid values flag, basic, detailed
+```
+
+if no `<json-or-yaml-document>` arguments are passed, it simply validates the `<json-schema>`.
+if `$schema` attribute is missing in schema, it uses latest version. this can be overridden by passing `-draft` flag
+
+exit-code is 1, if there are any validation errors
+
+## Validating YAML Documents
+
+since yaml supports non-string keys, such yaml documents are rendered as invalid json documents.
+yaml parser returns `map[interface{}]interface{}` for object, whereas json parser returns `map[string]interface{}`.
+this package accepts only `map[string]interface{}`, so we need to manually convert them to `map[string]interface{}`
+
+https://play.golang.org/p/Hhax3MrtD8r
+
+the above example shows how to validate yaml document with jsonschema.
+the conversion explained above is implemented by `toStringKeys` function
+
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
new file mode 100644
index 0000000..3f3cc80
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
@@ -0,0 +1,771 @@
+package jsonschema
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "math/big"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+// A Compiler represents a json-schema compiler.
+type Compiler struct {
+ // Draft represents the draft used when '$schema' attribute is missing.
+ //
+ // This defaults to latest supported draft (currently 2020-12).
+ Draft *Draft
+ resources map[string]*resource
+
+ // Extensions is used to register extensions.
+ extensions map[string]extension
+
+	// ExtractAnnotations tells whether schema annotations have to be extracted
+ // in compiled Schema or not.
+ ExtractAnnotations bool
+
+ // LoadURL loads the document at given absolute URL.
+ //
+ // If nil, package global LoadURL is used.
+ LoadURL func(s string) (io.ReadCloser, error)
+
+ // AssertFormat for specifications >= draft2019-09.
+ AssertFormat bool
+
+ // AssertContent for specifications >= draft2019-09.
+ AssertContent bool
+}
+
+// Compile parses json-schema at given url and returns, if successful,
+// a Schema object that can be used to match against json.
+//
+// Returned error can be *SchemaError
+func Compile(url string) (*Schema, error) {
+ return NewCompiler().Compile(url)
+}
+
+// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func MustCompile(url string) *Schema {
+ return NewCompiler().MustCompile(url)
+}
+
+// CompileString parses and compiles the given schema with given base url.
+func CompileString(url, schema string) (*Schema, error) {
+ c := NewCompiler()
+ if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+ return nil, err
+ }
+ return c.Compile(url)
+}
+
+// MustCompileString is like CompileString but panics on error.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func MustCompileString(url, schema string) *Schema {
+ c := NewCompiler()
+ if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+ panic(err)
+ }
+ return c.MustCompile(url)
+}
+
+// NewCompiler returns a json-schema Compiler object.
+// if '$schema' attribute is missing, it is treated as the latest supported draft
+// (currently 2020-12). to change this behavior change Compiler.Draft value
+func NewCompiler() *Compiler {
+ return &Compiler{Draft: latest, resources: make(map[string]*resource), extensions: make(map[string]extension)}
+}
+
+// AddResource adds in-memory resource to the compiler.
+//
+// Note that url must not have fragment
+func (c *Compiler) AddResource(url string, r io.Reader) error {
+ res, err := newResource(url, r)
+ if err != nil {
+ return err
+ }
+ c.resources[res.url] = res
+ return nil
+}
+
+// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
+// It simplifies safe initialization of global variables holding compiled Schemas.
+func (c *Compiler) MustCompile(url string) *Schema {
+ s, err := c.Compile(url)
+ if err != nil {
+ panic(fmt.Sprintf("jsonschema: %#v", err))
+ }
+ return s
+}
+
+// Compile parses json-schema at given url and returns, if successful,
+// a Schema object that can be used to match against json.
+//
+// error returned will be of type *SchemaError
+func (c *Compiler) Compile(url string) (*Schema, error) {
+ // make url absolute
+ u, err := toAbs(url)
+ if err != nil {
+ return nil, &SchemaError{url, err}
+ }
+ url = u
+
+ sch, err := c.compileURL(url, nil, "#")
+ if err != nil {
+ err = &SchemaError{url, err}
+ }
+ return sch, err
+}
+
+func (c *Compiler) findResource(url string) (*resource, error) {
+ if _, ok := c.resources[url]; !ok {
+ // load resource
+ var rdr io.Reader
+ if sch, ok := vocabSchemas[url]; ok {
+ rdr = strings.NewReader(sch)
+ } else {
+ loadURL := LoadURL
+ if c.LoadURL != nil {
+ loadURL = c.LoadURL
+ }
+ r, err := loadURL(url)
+ if err != nil {
+ return nil, err
+ }
+ defer r.Close()
+ rdr = r
+ }
+ if err := c.AddResource(url, rdr); err != nil {
+ return nil, err
+ }
+ }
+
+ r := c.resources[url]
+ if r.draft != nil {
+ return r, nil
+ }
+
+ // set draft
+ r.draft = c.Draft
+ if m, ok := r.doc.(map[string]interface{}); ok {
+ if sch, ok := m["$schema"]; ok {
+ sch, ok := sch.(string)
+ if !ok {
+ return nil, fmt.Errorf("jsonschema: invalid $schema in %s", url)
+ }
+ if !isURI(sch) {
+ return nil, fmt.Errorf("jsonschema: $schema must be uri in %s", url)
+ }
+ r.draft = findDraft(sch)
+ if r.draft == nil {
+ sch, _ := split(sch)
+ if sch == url {
+ return nil, fmt.Errorf("jsonschema: unsupported draft in %s", url)
+ }
+ mr, err := c.findResource(sch)
+ if err != nil {
+ return nil, err
+ }
+ r.draft = mr.draft
+ }
+ }
+ }
+
+ id, err := r.draft.resolveID(r.url, r.doc)
+ if err != nil {
+ return nil, err
+ }
+ if id != "" {
+ r.url = id
+ }
+
+ if err := r.fillSubschemas(c, r); err != nil {
+ return nil, err
+ }
+
+ return r, nil
+}
+
+func (c *Compiler) compileURL(url string, stack []schemaRef, ptr string) (*Schema, error) {
+ // if url points to a draft, return Draft.meta
+ if d := findDraft(url); d != nil && d.meta != nil {
+ return d.meta, nil
+ }
+
+ b, f := split(url)
+ r, err := c.findResource(b)
+ if err != nil {
+ return nil, err
+ }
+ return c.compileRef(r, stack, ptr, r, f)
+}
+
+func (c *Compiler) compileRef(r *resource, stack []schemaRef, refPtr string, res *resource, ref string) (*Schema, error) {
+ base := r.baseURL(res.floc)
+ ref, err := resolveURL(base, ref)
+ if err != nil {
+ return nil, err
+ }
+
+ u, f := split(ref)
+ sr := r.findResource(u)
+ if sr == nil {
+ // external resource
+ return c.compileURL(ref, stack, refPtr)
+ }
+
+ // ensure root resource is always compiled first.
+ // this is required to get schema.meta from root resource
+ if r.schema == nil {
+ r.schema = newSchema(r.url, r.floc, r.doc)
+ if _, err := c.compile(r, nil, schemaRef{"#", r.schema, false}, r); err != nil {
+ return nil, err
+ }
+ }
+
+ sr, err = r.resolveFragment(c, sr, f)
+ if err != nil {
+ return nil, err
+ }
+ if sr == nil {
+ return nil, fmt.Errorf("jsonschema: %s not found", ref)
+ }
+
+ if sr.schema != nil {
+ if err := checkLoop(stack, schemaRef{refPtr, sr.schema, false}); err != nil {
+ return nil, err
+ }
+ return sr.schema, nil
+ }
+
+ sr.schema = newSchema(r.url, sr.floc, sr.doc)
+ return c.compile(r, stack, schemaRef{refPtr, sr.schema, false}, sr)
+}
+
+func (c *Compiler) compileDynamicAnchors(r *resource, res *resource) error {
+ if r.draft.version < 2020 {
+ return nil
+ }
+
+ rr := r.listResources(res)
+ rr = append(rr, res)
+ for _, sr := range rr {
+ if m, ok := sr.doc.(map[string]interface{}); ok {
+ if _, ok := m["$dynamicAnchor"]; ok {
+ sch, err := c.compileRef(r, nil, "IGNORED", r, sr.floc)
+ if err != nil {
+ return err
+ }
+ res.schema.dynamicAnchors = append(res.schema.dynamicAnchors, sch)
+ }
+ }
+ }
+ return nil
+}
+
+func (c *Compiler) compile(r *resource, stack []schemaRef, sref schemaRef, res *resource) (*Schema, error) {
+ if err := c.compileDynamicAnchors(r, res); err != nil {
+ return nil, err
+ }
+
+ switch v := res.doc.(type) {
+ case bool:
+ res.schema.Always = &v
+ return res.schema, nil
+ default:
+ return res.schema, c.compileMap(r, stack, sref, res)
+ }
+}
+
+func (c *Compiler) compileMap(r *resource, stack []schemaRef, sref schemaRef, res *resource) error {
+ m := res.doc.(map[string]interface{})
+
+ if err := checkLoop(stack, sref); err != nil {
+ return err
+ }
+ stack = append(stack, sref)
+
+ var s = res.schema
+ var err error
+
+ if r == res { // root schema
+ if sch, ok := m["$schema"]; ok {
+ sch := sch.(string)
+ if d := findDraft(sch); d != nil {
+ s.meta = d.meta
+ } else {
+ if s.meta, err = c.compileRef(r, stack, "$schema", res, sch); err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ if ref, ok := m["$ref"]; ok {
+ s.Ref, err = c.compileRef(r, stack, "$ref", res, ref.(string))
+ if err != nil {
+ return err
+ }
+ if r.draft.version < 2019 {
+ // All other properties in a "$ref" object MUST be ignored
+ return nil
+ }
+ }
+
+ if r.draft.version >= 2019 {
+ if r == res { // root schema
+ if vocab, ok := m["$vocabulary"]; ok {
+ for url := range vocab.(map[string]interface{}) {
+ if !r.draft.isVocab(url) {
+ return fmt.Errorf("jsonschema: unsupported vocab %q in %s", url, res)
+ }
+ s.vocab = append(s.vocab, url)
+ }
+ } else {
+ s.vocab = r.draft.defaultVocab
+ }
+ }
+
+ if ref, ok := m["$recursiveRef"]; ok {
+ s.RecursiveRef, err = c.compileRef(r, stack, "$recursiveRef", res, ref.(string))
+ if err != nil {
+ return err
+ }
+ }
+ }
+ if r.draft.version >= 2020 {
+ if dref, ok := m["$dynamicRef"]; ok {
+ s.DynamicRef, err = c.compileRef(r, stack, "$dynamicRef", res, dref.(string))
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ loadInt := func(pname string) int {
+ if num, ok := m[pname]; ok {
+ i, _ := num.(json.Number).Float64()
+ return int(i)
+ }
+ return -1
+ }
+
+ loadRat := func(pname string) *big.Rat {
+ if num, ok := m[pname]; ok {
+ r, _ := new(big.Rat).SetString(string(num.(json.Number)))
+ return r
+ }
+ return nil
+ }
+
+ if r.draft.version < 2019 || r.schema.meta.hasVocab("validation") {
+ if t, ok := m["type"]; ok {
+ switch t := t.(type) {
+ case string:
+ s.Types = []string{t}
+ case []interface{}:
+ s.Types = toStrings(t)
+ }
+ }
+
+ if e, ok := m["enum"]; ok {
+ s.Enum = e.([]interface{})
+ allPrimitives := true
+ for _, item := range s.Enum {
+ switch jsonType(item) {
+ case "object", "array":
+ allPrimitives = false
+ break
+ }
+ }
+ s.enumError = "enum failed"
+ if allPrimitives {
+ if len(s.Enum) == 1 {
+ s.enumError = fmt.Sprintf("value must be %#v", s.Enum[0])
+ } else {
+ strEnum := make([]string, len(s.Enum))
+ for i, item := range s.Enum {
+ strEnum[i] = fmt.Sprintf("%#v", item)
+ }
+ s.enumError = fmt.Sprintf("value must be one of %s", strings.Join(strEnum, ", "))
+ }
+ }
+ }
+
+ s.Minimum = loadRat("minimum")
+ if exclusive, ok := m["exclusiveMinimum"]; ok {
+ if exclusive, ok := exclusive.(bool); ok {
+ if exclusive {
+ s.Minimum, s.ExclusiveMinimum = nil, s.Minimum
+ }
+ } else {
+ s.ExclusiveMinimum = loadRat("exclusiveMinimum")
+ }
+ }
+
+ s.Maximum = loadRat("maximum")
+ if exclusive, ok := m["exclusiveMaximum"]; ok {
+ if exclusive, ok := exclusive.(bool); ok {
+ if exclusive {
+ s.Maximum, s.ExclusiveMaximum = nil, s.Maximum
+ }
+ } else {
+ s.ExclusiveMaximum = loadRat("exclusiveMaximum")
+ }
+ }
+
+ s.MultipleOf = loadRat("multipleOf")
+
+ s.MinProperties, s.MaxProperties = loadInt("minProperties"), loadInt("maxProperties")
+
+ if req, ok := m["required"]; ok {
+ s.Required = toStrings(req.([]interface{}))
+ }
+
+ s.MinItems, s.MaxItems = loadInt("minItems"), loadInt("maxItems")
+
+ if unique, ok := m["uniqueItems"]; ok {
+ s.UniqueItems = unique.(bool)
+ }
+
+ s.MinLength, s.MaxLength = loadInt("minLength"), loadInt("maxLength")
+
+ if pattern, ok := m["pattern"]; ok {
+ s.Pattern = regexp.MustCompile(pattern.(string))
+ }
+
+ if r.draft.version >= 2019 {
+ s.MinContains, s.MaxContains = loadInt("minContains"), loadInt("maxContains")
+ if s.MinContains == -1 {
+ s.MinContains = 1
+ }
+
+ if deps, ok := m["dependentRequired"]; ok {
+ deps := deps.(map[string]interface{})
+ s.DependentRequired = make(map[string][]string, len(deps))
+ for pname, pvalue := range deps {
+ s.DependentRequired[pname] = toStrings(pvalue.([]interface{}))
+ }
+ }
+ }
+ }
+
+ compile := func(stack []schemaRef, ptr string) (*Schema, error) {
+ return c.compileRef(r, stack, ptr, res, r.url+res.floc+"/"+ptr)
+ }
+
+ loadSchema := func(pname string, stack []schemaRef) (*Schema, error) {
+ if _, ok := m[pname]; ok {
+ return compile(stack, escape(pname))
+ }
+ return nil, nil
+ }
+
+ loadSchemas := func(pname string, stack []schemaRef) ([]*Schema, error) {
+ if pvalue, ok := m[pname]; ok {
+ pvalue := pvalue.([]interface{})
+ schemas := make([]*Schema, len(pvalue))
+ for i := range pvalue {
+ sch, err := compile(stack, escape(pname)+"/"+strconv.Itoa(i))
+ if err != nil {
+ return nil, err
+ }
+ schemas[i] = sch
+ }
+ return schemas, nil
+ }
+ return nil, nil
+ }
+
+ if r.draft.version < 2019 || r.schema.meta.hasVocab("applicator") {
+ if s.Not, err = loadSchema("not", stack); err != nil {
+ return err
+ }
+ if s.AllOf, err = loadSchemas("allOf", stack); err != nil {
+ return err
+ }
+ if s.AnyOf, err = loadSchemas("anyOf", stack); err != nil {
+ return err
+ }
+ if s.OneOf, err = loadSchemas("oneOf", stack); err != nil {
+ return err
+ }
+
+ if props, ok := m["properties"]; ok {
+ props := props.(map[string]interface{})
+ s.Properties = make(map[string]*Schema, len(props))
+ for pname := range props {
+ s.Properties[pname], err = compile(nil, "properties/"+escape(pname))
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ if regexProps, ok := m["regexProperties"]; ok {
+ s.RegexProperties = regexProps.(bool)
+ }
+
+ if patternProps, ok := m["patternProperties"]; ok {
+ patternProps := patternProps.(map[string]interface{})
+ s.PatternProperties = make(map[*regexp.Regexp]*Schema, len(patternProps))
+ for pattern := range patternProps {
+ s.PatternProperties[regexp.MustCompile(pattern)], err = compile(nil, "patternProperties/"+escape(pattern))
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ if additionalProps, ok := m["additionalProperties"]; ok {
+ switch additionalProps := additionalProps.(type) {
+ case bool:
+ s.AdditionalProperties = additionalProps
+ case map[string]interface{}:
+ s.AdditionalProperties, err = compile(nil, "additionalProperties")
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ if deps, ok := m["dependencies"]; ok {
+ deps := deps.(map[string]interface{})
+ s.Dependencies = make(map[string]interface{}, len(deps))
+ for pname, pvalue := range deps {
+ switch pvalue := pvalue.(type) {
+ case []interface{}:
+ s.Dependencies[pname] = toStrings(pvalue)
+ default:
+ s.Dependencies[pname], err = compile(stack, "dependencies/"+escape(pname))
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ if r.draft.version >= 6 {
+ if s.PropertyNames, err = loadSchema("propertyNames", nil); err != nil {
+ return err
+ }
+ if s.Contains, err = loadSchema("contains", nil); err != nil {
+ return err
+ }
+ }
+
+ if r.draft.version >= 7 {
+ if m["if"] != nil {
+ if s.If, err = loadSchema("if", stack); err != nil {
+ return err
+ }
+ if s.Then, err = loadSchema("then", stack); err != nil {
+ return err
+ }
+ if s.Else, err = loadSchema("else", stack); err != nil {
+ return err
+ }
+ }
+ }
+ if r.draft.version >= 2019 {
+ if deps, ok := m["dependentSchemas"]; ok {
+ deps := deps.(map[string]interface{})
+ s.DependentSchemas = make(map[string]*Schema, len(deps))
+ for pname := range deps {
+ s.DependentSchemas[pname], err = compile(stack, "dependentSchemas/"+escape(pname))
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ if r.draft.version >= 2020 {
+ if s.PrefixItems, err = loadSchemas("prefixItems", nil); err != nil {
+ return err
+ }
+ if s.Items2020, err = loadSchema("items", nil); err != nil {
+ return err
+ }
+ } else {
+ if items, ok := m["items"]; ok {
+ switch items.(type) {
+ case []interface{}:
+ s.Items, err = loadSchemas("items", nil)
+ if err != nil {
+ return err
+ }
+ if additionalItems, ok := m["additionalItems"]; ok {
+ switch additionalItems := additionalItems.(type) {
+ case bool:
+ s.AdditionalItems = additionalItems
+ case map[string]interface{}:
+ s.AdditionalItems, err = compile(nil, "additionalItems")
+ if err != nil {
+ return err
+ }
+ }
+ }
+ default:
+ s.Items, err = compile(nil, "items")
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+
+ }
+
+ // unevaluatedXXX keywords were in "applicator" vocab in 2019, but moved to new vocab "unevaluated" in 2020
+ if (r.draft.version == 2019 && r.schema.meta.hasVocab("applicator")) || (r.draft.version >= 2020 && r.schema.meta.hasVocab("unevaluated")) {
+ if s.UnevaluatedProperties, err = loadSchema("unevaluatedProperties", nil); err != nil {
+ return err
+ }
+ if s.UnevaluatedItems, err = loadSchema("unevaluatedItems", nil); err != nil {
+ return err
+ }
+ if r.draft.version >= 2020 {
+ // any item in an array that passes validation of the contains schema is considered "evaluated"
+ s.ContainsEval = true
+ }
+ }
+
+ if format, ok := m["format"]; ok {
+ s.Format = format.(string)
+ if r.draft.version < 2019 || c.AssertFormat || r.schema.meta.hasVocab("format-assertion") {
+ s.format, _ = Formats[s.Format]
+ }
+ }
+
+ if c.ExtractAnnotations {
+ if title, ok := m["title"]; ok {
+ s.Title = title.(string)
+ }
+ if description, ok := m["description"]; ok {
+ s.Description = description.(string)
+ }
+ s.Default = m["default"]
+ }
+
+ if r.draft.version >= 6 {
+ if c, ok := m["const"]; ok {
+ s.Constant = []interface{}{c}
+ }
+ }
+
+ if r.draft.version >= 7 {
+ if encoding, ok := m["contentEncoding"]; ok {
+ s.ContentEncoding = encoding.(string)
+ s.decoder, _ = Decoders[s.ContentEncoding]
+ }
+ if mediaType, ok := m["contentMediaType"]; ok {
+ s.ContentMediaType = mediaType.(string)
+ s.mediaType, _ = MediaTypes[s.ContentMediaType]
+ if s.ContentSchema, err = loadSchema("contentSchema", stack); err != nil {
+ return err
+ }
+ }
+ if c.ExtractAnnotations {
+ if comment, ok := m["$comment"]; ok {
+ s.Comment = comment.(string)
+ }
+ if readOnly, ok := m["readOnly"]; ok {
+ s.ReadOnly = readOnly.(bool)
+ }
+ if writeOnly, ok := m["writeOnly"]; ok {
+ s.WriteOnly = writeOnly.(bool)
+ }
+ if examples, ok := m["examples"]; ok {
+ s.Examples = examples.([]interface{})
+ }
+ }
+ }
+
+ if r.draft.version >= 2019 {
+ if !c.AssertContent {
+ s.decoder = nil
+ s.mediaType = nil
+ s.ContentSchema = nil
+ }
+ if c.ExtractAnnotations {
+ if deprecated, ok := m["deprecated"]; ok {
+ s.Deprecated = deprecated.(bool)
+ }
+ }
+ }
+
+ for name, ext := range c.extensions {
+ es, err := ext.compiler.Compile(CompilerContext{c, r, stack, res}, m)
+ if err != nil {
+ return err
+ }
+ if es != nil {
+ if s.Extensions == nil {
+ s.Extensions = make(map[string]ExtSchema)
+ }
+ s.Extensions[name] = es
+ }
+ }
+
+ return nil
+}
+
+func (c *Compiler) validateSchema(r *resource, v interface{}, vloc string) error {
+ validate := func(meta *Schema) error {
+ if meta == nil {
+ return nil
+ }
+ return meta.validateValue(v, vloc)
+ }
+
+ if err := validate(r.draft.meta); err != nil {
+ return err
+ }
+ for _, ext := range c.extensions {
+ if err := validate(ext.meta); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func toStrings(arr []interface{}) []string {
+ s := make([]string, len(arr))
+ for i, v := range arr {
+ s[i] = v.(string)
+ }
+ return s
+}
+
+// SchemaRef captures schema and the path referring to it.
+type schemaRef struct {
+ path string // relative-json-pointer to schema
+ schema *Schema // target schema
+ discard bool // true when scope left
+}
+
+func (sr schemaRef) String() string {
+ return fmt.Sprintf("(%s)%v", sr.path, sr.schema)
+}
+
+func checkLoop(stack []schemaRef, sref schemaRef) error {
+ for _, ref := range stack {
+ if ref.schema == sref.schema {
+ return infiniteLoopError(stack, sref)
+ }
+ }
+ return nil
+}
+
+func keywordLocation(stack []schemaRef, path string) string {
+ var loc string
+ for _, ref := range stack[1:] {
+ loc += "/" + ref.path
+ }
+ if path != "" {
+ loc = loc + "/" + path
+ }
+ return loc
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
new file mode 100644
index 0000000..7570b8b
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
@@ -0,0 +1,29 @@
+package jsonschema
+
+import (
+ "encoding/base64"
+ "encoding/json"
+)
+
+// Decoders is a registry of functions, which know how to decode
+// string encoded in specific format.
+//
+// New Decoders can be registered by adding to this map. Key is encoding name,
+// value is function that knows how to decode string in that format.
+var Decoders = map[string]func(string) ([]byte, error){
+ "base64": base64.StdEncoding.DecodeString,
+}
+
+// MediaTypes is a registry of functions, which know how to validate
+// whether the bytes represent data of that mediaType.
+//
+// New mediaTypes can be registered by adding to this map. Key is mediaType name,
+// value is function that knows how to validate that mediaType.
+var MediaTypes = map[string]func([]byte) error{
+ "application/json": validateJSON,
+}
+
+func validateJSON(b []byte) error {
+ var v interface{}
+ return json.Unmarshal(b, &v)
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
new file mode 100644
index 0000000..a124262
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
@@ -0,0 +1,49 @@
+/*
+Package jsonschema provides json-schema compilation and validation.
+
+Features:
+ - implements draft 2020-12, 2019-09, draft-7, draft-6, draft-4
+ - fully compliant with JSON-Schema-Test-Suite, (excluding some optional)
+ - list of optional tests that are excluded can be found in schema_test.go(variable skipTests)
+ - validates schemas against meta-schema
+ - full support of remote references
+ - support of recursive references between schemas
+ - detects infinite loop in schemas
+ - thread safe validation
+  - rich, intuitive hierarchical error messages with json-pointers to exact location
+ - supports output formats flag, basic and detailed
+ - supports enabling format and content Assertions in draft2019-09 or above
+ - change Compiler.AssertFormat, Compiler.AssertContent to true
+ - compiled schema can be introspected. easier to develop tools like generating go structs given schema
+ - supports user-defined keywords via extensions
+ - implements following formats (supports user-defined)
+ - date-time, date, time, duration (supports leap-second)
+ - uuid, hostname, email
+ - ip-address, ipv4, ipv6
+ - uri, uriref, uri-template(limited validation)
+ - json-pointer, relative-json-pointer
+ - regex, format
+ - implements following contentEncoding (supports user-defined)
+ - base64
+ - implements following contentMediaType (supports user-defined)
+ - application/json
+ - can load from files/http/https/string/[]byte/io.Reader (supports user-defined)
+
+The schema is compiled against the version specified in "$schema" property.
+If "$schema" property is missing, it uses the latest draft currently implemented
+by this library.
+
+You can force to use specific draft, when "$schema" is missing, as follows:
+
+ compiler := jsonschema.NewCompiler()
+ compiler.Draft = jsonschema.Draft4
+
+This package supports loading json-schema from filePath and fileURL.
+
+To load json-schema from HTTPURL, add following import:
+
+ import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+
+you can validate yaml documents. see https://play.golang.org/p/sJy1qY7dXgA
+*/
+package jsonschema
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
new file mode 100644
index 0000000..eaa9168
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
@@ -0,0 +1,1432 @@
+package jsonschema
+
+import (
+ "strconv"
+ "strings"
+)
+
+// A Draft represents json-schema draft
+type Draft struct {
+ version int
+ meta *Schema
+ id string // property name used to represent schema id.
+ boolSchema bool // is boolean valid schema
+ vocab []string // built-in vocab
+ defaultVocab []string // vocabs when $vocabulary is not used
+ subschemas map[string]position
+}
+
+func (d *Draft) loadMeta(url, schema string) {
+ c := NewCompiler()
+ c.AssertFormat = true
+ if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
+ panic(err)
+ }
+ d.meta = c.MustCompile(url)
+ d.meta.meta = d.meta
+}
+
+func (d *Draft) getID(sch interface{}) string {
+ m, ok := sch.(map[string]interface{})
+ if !ok {
+ return ""
+ }
+ if _, ok := m["$ref"]; ok && d.version <= 7 {
+ // $ref prevents a sibling id from changing the base uri
+ return ""
+ }
+ v, ok := m[d.id]
+ if !ok {
+ return ""
+ }
+ id, ok := v.(string)
+ if !ok {
+ return ""
+ }
+ return id
+}
+
+func (d *Draft) resolveID(base string, sch interface{}) (string, error) {
+ id, _ := split(d.getID(sch)) // strip fragment
+ if id == "" {
+ return "", nil
+ }
+ url, err := resolveURL(base, id)
+ url, _ = split(url) // strip fragment
+ return url, err
+}
+
+func (d *Draft) anchors(sch interface{}) []string {
+ m, ok := sch.(map[string]interface{})
+ if !ok {
+ return nil
+ }
+
+ var anchors []string
+
+ // before draft2019, anchor is specified in id
+ _, f := split(d.getID(m))
+ if f != "#" {
+ anchors = append(anchors, f[1:])
+ }
+
+ if v, ok := m["$anchor"]; ok && d.version >= 2019 {
+ anchors = append(anchors, v.(string))
+ }
+ if v, ok := m["$dynamicAnchor"]; ok && d.version >= 2020 {
+ anchors = append(anchors, v.(string))
+ }
+ return anchors
+}
+
+// listSubschemas collects subschemas in r into rr.
+func (d *Draft) listSubschemas(r *resource, base string, rr map[string]*resource) error {
+ add := func(loc string, sch interface{}) error {
+ url, err := d.resolveID(base, sch)
+ if err != nil {
+ return err
+ }
+ floc := r.floc + "/" + loc
+ sr := &resource{url: url, floc: floc, doc: sch}
+ rr[floc] = sr
+
+ base := base
+ if url != "" {
+ base = url
+ }
+ return d.listSubschemas(sr, base, rr)
+ }
+
+ sch, ok := r.doc.(map[string]interface{})
+ if !ok {
+ return nil
+ }
+ for kw, pos := range d.subschemas {
+ v, ok := sch[kw]
+ if !ok {
+ continue
+ }
+ if pos&self != 0 {
+ switch v := v.(type) {
+ case map[string]interface{}:
+ if err := add(kw, v); err != nil {
+ return err
+ }
+ case bool:
+ if d.boolSchema {
+ if err := add(kw, v); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ if pos&item != 0 {
+ if v, ok := v.([]interface{}); ok {
+ for i, item := range v {
+ if err := add(kw+"/"+strconv.Itoa(i), item); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ if pos&prop != 0 {
+ if v, ok := v.(map[string]interface{}); ok {
+ for pname, pval := range v {
+ if err := add(kw+"/"+escape(pname), pval); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// isVocab tells whether url is built-in vocab.
+func (d *Draft) isVocab(url string) bool {
+ for _, v := range d.vocab {
+ if url == v {
+ return true
+ }
+ }
+ return false
+}
+
+type position uint
+
+const (
+ self position = 1 << iota
+ prop
+ item
+)
+
+// supported drafts
+var (
+ Draft4 = &Draft{version: 4, id: "id", boolSchema: false}
+ Draft6 = &Draft{version: 6, id: "$id", boolSchema: true}
+ Draft7 = &Draft{version: 7, id: "$id", boolSchema: true}
+ Draft2019 = &Draft{
+ version: 2019,
+ id: "$id",
+ boolSchema: true,
+ vocab: []string{
+ "https://json-schema.org/draft/2019-09/vocab/core",
+ "https://json-schema.org/draft/2019-09/vocab/applicator",
+ "https://json-schema.org/draft/2019-09/vocab/validation",
+ "https://json-schema.org/draft/2019-09/vocab/meta-data",
+ "https://json-schema.org/draft/2019-09/vocab/format",
+ "https://json-schema.org/draft/2019-09/vocab/content",
+ },
+ defaultVocab: []string{
+ "https://json-schema.org/draft/2019-09/vocab/core",
+ "https://json-schema.org/draft/2019-09/vocab/applicator",
+ "https://json-schema.org/draft/2019-09/vocab/validation",
+ },
+ }
+ Draft2020 = &Draft{
+ version: 2020,
+ id: "$id",
+ boolSchema: true,
+ vocab: []string{
+ "https://json-schema.org/draft/2020-12/vocab/core",
+ "https://json-schema.org/draft/2020-12/vocab/applicator",
+ "https://json-schema.org/draft/2020-12/vocab/unevaluated",
+ "https://json-schema.org/draft/2020-12/vocab/validation",
+ "https://json-schema.org/draft/2020-12/vocab/meta-data",
+ "https://json-schema.org/draft/2020-12/vocab/format-annotation",
+ "https://json-schema.org/draft/2020-12/vocab/format-assertion",
+ "https://json-schema.org/draft/2020-12/vocab/content",
+ },
+ defaultVocab: []string{
+ "https://json-schema.org/draft/2020-12/vocab/core",
+ "https://json-schema.org/draft/2020-12/vocab/applicator",
+ "https://json-schema.org/draft/2020-12/vocab/unevaluated",
+ "https://json-schema.org/draft/2020-12/vocab/validation",
+ },
+ }
+
+ latest = Draft2020
+)
+
+func findDraft(url string) *Draft {
+ if strings.HasPrefix(url, "http://") {
+ url = "https://" + strings.TrimPrefix(url, "http://")
+ }
+ if strings.HasSuffix(url, "#") || strings.HasSuffix(url, "#/") {
+ url = url[:strings.IndexByte(url, '#')]
+ }
+ switch url {
+ case "https://json-schema.org/schema":
+ return latest
+ case "https://json-schema.org/draft/2020-12/schema":
+ return Draft2020
+ case "https://json-schema.org/draft/2019-09/schema":
+ return Draft2019
+ case "https://json-schema.org/draft-07/schema":
+ return Draft7
+ case "https://json-schema.org/draft-06/schema":
+ return Draft6
+ case "https://json-schema.org/draft-04/schema":
+ return Draft4
+ }
+ return nil
+}
+
+func init() {
+ subschemas := map[string]position{
+ // type agnostic
+ "definitions": prop,
+ "not": self,
+ "allOf": item,
+ "anyOf": item,
+ "oneOf": item,
+ // object
+ "properties": prop,
+ "additionalProperties": self,
+ "patternProperties": prop,
+ // array
+ "items": self | item,
+ "additionalItems": self,
+ "dependencies": prop,
+ }
+ Draft4.subschemas = clone(subschemas)
+
+ subschemas["propertyNames"] = self
+ subschemas["contains"] = self
+ Draft6.subschemas = clone(subschemas)
+
+ subschemas["if"] = self
+ subschemas["then"] = self
+ subschemas["else"] = self
+ Draft7.subschemas = clone(subschemas)
+
+ subschemas["$defs"] = prop
+ subschemas["dependentSchemas"] = prop
+ subschemas["unevaluatedProperties"] = self
+ subschemas["unevaluatedItems"] = self
+ Draft2019.subschemas = clone(subschemas)
+
+ subschemas["prefixItems"] = item
+ Draft2020.subschemas = clone(subschemas)
+
+ Draft4.loadMeta("http://json-schema.org/draft-04/schema", `{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "description": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "positiveInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "positiveIntegerDefault0": {
+ "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
+ },
+ "simpleTypes": {
+ "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ },
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "string",
+ "format": "uriref"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": {},
+ "multipleOf": {
+ "type": "number",
+ "minimum": 0,
+ "exclusiveMinimum": true
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "boolean",
+ "default": false
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "boolean",
+ "default": false
+ },
+ "maxLength": { "$ref": "#/definitions/positiveInteger" },
+ "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": {
+ "anyOf": [
+ { "type": "boolean" },
+ { "$ref": "#" }
+ ],
+ "default": {}
+ },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": {}
+ },
+ "maxItems": { "$ref": "#/definitions/positiveInteger" },
+ "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "maxProperties": { "$ref": "#/definitions/positiveInteger" },
+ "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": {
+ "anyOf": [
+ { "type": "boolean" },
+ { "$ref": "#" }
+ ],
+ "default": {}
+ },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "regexProperties": true,
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "regexProperties": { "type": "boolean" },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "enum": {
+ "type": "array",
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" },
+ "format": { "type": "string" },
+ "$ref": { "type": "string" }
+ },
+ "dependencies": {
+ "exclusiveMaximum": [ "maximum" ],
+ "exclusiveMinimum": [ "minimum" ]
+ },
+ "default": {}
+ }`)
+ Draft6.loadMeta("http://json-schema.org/draft-06/schema", `{
+ "$schema": "http://json-schema.org/draft-06/schema#",
+ "$id": "http://json-schema.org/draft-06/schema#",
+ "title": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "allOf": [
+ { "$ref": "#/definitions/nonNegativeInteger" },
+ { "default": 0 }
+ ]
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ },
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": {},
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": { "$ref": "#" },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": {}
+ },
+ "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "contains": { "$ref": "#" },
+ "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": { "$ref": "#" },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "regexProperties": true,
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "propertyNames": { "$ref": "#" },
+ "const": {},
+ "enum": {
+ "type": "array",
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "format": { "type": "string" },
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" }
+ },
+ "default": {}
+ }`)
+ Draft7.loadMeta("http://json-schema.org/draft-07/schema", `{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "$id": "http://json-schema.org/draft-07/schema#",
+ "title": "Core schema meta-schema",
+ "definitions": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$ref": "#" }
+ },
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "allOf": [
+ { "$ref": "#/definitions/nonNegativeInteger" },
+ { "default": 0 }
+ ]
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ },
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$comment": {
+ "type": "string"
+ },
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": true,
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "writeOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "examples": {
+ "type": "array",
+ "items": true
+ },
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "additionalItems": { "$ref": "#" },
+ "items": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/schemaArray" }
+ ],
+ "default": true
+ },
+ "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "contains": { "$ref": "#" },
+ "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/definitions/stringArray" },
+ "additionalProperties": { "$ref": "#" },
+ "definitions": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$ref": "#" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependencies": {
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$ref": "#" },
+ { "$ref": "#/definitions/stringArray" }
+ ]
+ }
+ },
+ "propertyNames": { "$ref": "#" },
+ "const": true,
+ "enum": {
+ "type": "array",
+ "items": true,
+ "minItems": 1,
+ "uniqueItems": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/definitions/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/definitions/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "format": { "type": "string" },
+ "contentMediaType": { "type": "string" },
+ "contentEncoding": { "type": "string" },
+ "if": { "$ref": "#" },
+ "then": { "$ref": "#" },
+ "else": { "$ref": "#" },
+ "allOf": { "$ref": "#/definitions/schemaArray" },
+ "anyOf": { "$ref": "#/definitions/schemaArray" },
+ "oneOf": { "$ref": "#/definitions/schemaArray" },
+ "not": { "$ref": "#" }
+ },
+ "default": true
+ }`)
+ Draft2019.loadMeta("https://json-schema.org/draft/2019-09/schema", `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/schema",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/core": true,
+ "https://json-schema.org/draft/2019-09/vocab/applicator": true,
+ "https://json-schema.org/draft/2019-09/vocab/validation": true,
+ "https://json-schema.org/draft/2019-09/vocab/meta-data": true,
+ "https://json-schema.org/draft/2019-09/vocab/format": false,
+ "https://json-schema.org/draft/2019-09/vocab/content": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Core and Validation specifications meta-schema",
+ "allOf": [
+ {"$ref": "meta/core"},
+ {"$ref": "meta/applicator"},
+ {"$ref": "meta/validation"},
+ {"$ref": "meta/meta-data"},
+ {"$ref": "meta/format"},
+ {"$ref": "meta/content"}
+ ],
+ "type": ["object", "boolean"],
+ "properties": {
+ "definitions": {
+ "$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.",
+ "type": "object",
+ "additionalProperties": { "$recursiveRef": "#" },
+ "default": {}
+ },
+ "dependencies": {
+ "$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"",
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$recursiveRef": "#" },
+ { "$ref": "meta/validation#/$defs/stringArray" }
+ ]
+ }
+ }
+ }
+ }`)
+ Draft2020.loadMeta("https://json-schema.org/draft/2020-12/schema", `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/schema",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/core": true,
+ "https://json-schema.org/draft/2020-12/vocab/applicator": true,
+ "https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
+ "https://json-schema.org/draft/2020-12/vocab/validation": true,
+ "https://json-schema.org/draft/2020-12/vocab/meta-data": true,
+ "https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
+ "https://json-schema.org/draft/2020-12/vocab/content": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Core and Validation specifications meta-schema",
+ "allOf": [
+ {"$ref": "meta/core"},
+ {"$ref": "meta/applicator"},
+ {"$ref": "meta/unevaluated"},
+ {"$ref": "meta/validation"},
+ {"$ref": "meta/meta-data"},
+ {"$ref": "meta/format-annotation"},
+ {"$ref": "meta/content"}
+ ],
+ "type": ["object", "boolean"],
+ "$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.",
+ "properties": {
+ "definitions": {
+ "$comment": "\"definitions\" has been replaced by \"$defs\".",
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "deprecated": true,
+ "default": {}
+ },
+ "dependencies": {
+ "$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.",
+ "type": "object",
+ "additionalProperties": {
+ "anyOf": [
+ { "$dynamicRef": "#meta" },
+ { "$ref": "meta/validation#/$defs/stringArray" }
+ ]
+ },
+ "deprecated": true,
+ "default": {}
+ },
+ "$recursiveAnchor": {
+ "$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".",
+ "$ref": "meta/core#/$defs/anchorString",
+ "deprecated": true
+ },
+ "$recursiveRef": {
+ "$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".",
+ "$ref": "meta/core#/$defs/uriReferenceString",
+ "deprecated": true
+ }
+ }
+ }`)
+}
+
+var vocabSchemas = map[string]string{
+ "https://json-schema.org/draft/2019-09/meta/core": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/core",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/core": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Core vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "type": "string",
+ "format": "uri-reference",
+ "$comment": "Non-empty fragments not allowed.",
+ "pattern": "^[^#]*#?$"
+ },
+ "$schema": {
+ "type": "string",
+ "format": "uri"
+ },
+ "$anchor": {
+ "type": "string",
+ "pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"
+ },
+ "$ref": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$recursiveRef": {
+ "type": "string",
+ "format": "uri-reference"
+ },
+ "$recursiveAnchor": {
+ "type": "boolean",
+ "default": false
+ },
+ "$vocabulary": {
+ "type": "object",
+ "propertyNames": {
+ "type": "string",
+ "format": "uri"
+ },
+ "additionalProperties": {
+ "type": "boolean"
+ }
+ },
+ "$comment": {
+ "type": "string"
+ },
+ "$defs": {
+ "type": "object",
+ "additionalProperties": { "$recursiveRef": "#" },
+ "default": {}
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/applicator": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/applicator",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/applicator": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Applicator vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "additionalItems": { "$recursiveRef": "#" },
+ "unevaluatedItems": { "$recursiveRef": "#" },
+ "items": {
+ "anyOf": [
+ { "$recursiveRef": "#" },
+ { "$ref": "#/$defs/schemaArray" }
+ ]
+ },
+ "contains": { "$recursiveRef": "#" },
+ "additionalProperties": { "$recursiveRef": "#" },
+ "unevaluatedProperties": { "$recursiveRef": "#" },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$recursiveRef": "#" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$recursiveRef": "#" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependentSchemas": {
+ "type": "object",
+ "additionalProperties": {
+ "$recursiveRef": "#"
+ }
+ },
+ "propertyNames": { "$recursiveRef": "#" },
+ "if": { "$recursiveRef": "#" },
+ "then": { "$recursiveRef": "#" },
+ "else": { "$recursiveRef": "#" },
+ "allOf": { "$ref": "#/$defs/schemaArray" },
+ "anyOf": { "$ref": "#/$defs/schemaArray" },
+ "oneOf": { "$ref": "#/$defs/schemaArray" },
+ "not": { "$recursiveRef": "#" }
+ },
+ "$defs": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$recursiveRef": "#" }
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/validation": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/validation",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/validation": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Validation vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minContains": {
+ "$ref": "#/$defs/nonNegativeInteger",
+ "default": 1
+ },
+ "maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/$defs/stringArray" },
+ "dependentRequired": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/$defs/stringArray"
+ }
+ },
+ "const": true,
+ "enum": {
+ "type": "array",
+ "items": true
+ },
+ "type": {
+ "anyOf": [
+ { "$ref": "#/$defs/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/$defs/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ }
+ },
+ "$defs": {
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "$ref": "#/$defs/nonNegativeInteger",
+ "default": 0
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/meta-data": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/meta-data",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/meta-data": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Meta-data vocabulary meta-schema",
+
+ "type": ["object", "boolean"],
+ "properties": {
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": true,
+ "deprecated": {
+ "type": "boolean",
+ "default": false
+ },
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "writeOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "examples": {
+ "type": "array",
+ "items": true
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/format": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/format",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/format": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Format vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "format": { "type": "string" }
+ }
+ }`,
+ "https://json-schema.org/draft/2019-09/meta/content": `{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "https://json-schema.org/draft/2019-09/meta/content",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2019-09/vocab/content": true
+ },
+ "$recursiveAnchor": true,
+
+ "title": "Content vocabulary meta-schema",
+
+ "type": ["object", "boolean"],
+ "properties": {
+ "contentMediaType": { "type": "string" },
+ "contentEncoding": { "type": "string" },
+ "contentSchema": { "$recursiveRef": "#" }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/core": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/core",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/core": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Core vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "$id": {
+ "$ref": "#/$defs/uriReferenceString",
+ "$comment": "Non-empty fragments not allowed.",
+ "pattern": "^[^#]*#?$"
+ },
+ "$schema": { "$ref": "#/$defs/uriString" },
+ "$ref": { "$ref": "#/$defs/uriReferenceString" },
+ "$anchor": { "$ref": "#/$defs/anchorString" },
+ "$dynamicRef": { "$ref": "#/$defs/uriReferenceString" },
+ "$dynamicAnchor": { "$ref": "#/$defs/anchorString" },
+ "$vocabulary": {
+ "type": "object",
+ "propertyNames": { "$ref": "#/$defs/uriString" },
+ "additionalProperties": {
+ "type": "boolean"
+ }
+ },
+ "$comment": {
+ "type": "string"
+ },
+ "$defs": {
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" }
+ }
+ },
+ "$defs": {
+ "anchorString": {
+ "type": "string",
+ "pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"
+ },
+ "uriString": {
+ "type": "string",
+ "format": "uri"
+ },
+ "uriReferenceString": {
+ "type": "string",
+ "format": "uri-reference"
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/applicator": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/applicator",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/applicator": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Applicator vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "prefixItems": { "$ref": "#/$defs/schemaArray" },
+ "items": { "$dynamicRef": "#meta" },
+ "contains": { "$dynamicRef": "#meta" },
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "properties": {
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "default": {}
+ },
+ "patternProperties": {
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "propertyNames": { "format": "regex" },
+ "default": {}
+ },
+ "dependentSchemas": {
+ "type": "object",
+ "additionalProperties": { "$dynamicRef": "#meta" },
+ "default": {}
+ },
+ "propertyNames": { "$dynamicRef": "#meta" },
+ "if": { "$dynamicRef": "#meta" },
+ "then": { "$dynamicRef": "#meta" },
+ "else": { "$dynamicRef": "#meta" },
+ "allOf": { "$ref": "#/$defs/schemaArray" },
+ "anyOf": { "$ref": "#/$defs/schemaArray" },
+ "oneOf": { "$ref": "#/$defs/schemaArray" },
+ "not": { "$dynamicRef": "#meta" }
+ },
+ "$defs": {
+ "schemaArray": {
+ "type": "array",
+ "minItems": 1,
+ "items": { "$dynamicRef": "#meta" }
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/unevaluated": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/unevaluated",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/unevaluated": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Unevaluated applicator vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "unevaluatedItems": { "$dynamicRef": "#meta" },
+ "unevaluatedProperties": { "$dynamicRef": "#meta" }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/validation": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/validation",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/validation": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Validation vocabulary meta-schema",
+ "type": ["object", "boolean"],
+ "properties": {
+ "type": {
+ "anyOf": [
+ { "$ref": "#/$defs/simpleTypes" },
+ {
+ "type": "array",
+ "items": { "$ref": "#/$defs/simpleTypes" },
+ "minItems": 1,
+ "uniqueItems": true
+ }
+ ]
+ },
+ "const": true,
+ "enum": {
+ "type": "array",
+ "items": true
+ },
+ "multipleOf": {
+ "type": "number",
+ "exclusiveMinimum": 0
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "exclusiveMaximum": {
+ "type": "number"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "exclusiveMinimum": {
+ "type": "number"
+ },
+ "maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "pattern": {
+ "type": "string",
+ "format": "regex"
+ },
+ "maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "uniqueItems": {
+ "type": "boolean",
+ "default": false
+ },
+ "maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minContains": {
+ "$ref": "#/$defs/nonNegativeInteger",
+ "default": 1
+ },
+ "maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
+ "minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
+ "required": { "$ref": "#/$defs/stringArray" },
+ "dependentRequired": {
+ "type": "object",
+ "additionalProperties": {
+ "$ref": "#/$defs/stringArray"
+ }
+ }
+ },
+ "$defs": {
+ "nonNegativeInteger": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "nonNegativeIntegerDefault0": {
+ "$ref": "#/$defs/nonNegativeInteger",
+ "default": 0
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "type": "array",
+ "items": { "type": "string" },
+ "uniqueItems": true,
+ "default": []
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/meta-data": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/meta-data",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/meta-data": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Meta-data vocabulary meta-schema",
+
+ "type": ["object", "boolean"],
+ "properties": {
+ "title": {
+ "type": "string"
+ },
+ "description": {
+ "type": "string"
+ },
+ "default": true,
+ "deprecated": {
+ "type": "boolean",
+ "default": false
+ },
+ "readOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "writeOnly": {
+ "type": "boolean",
+ "default": false
+ },
+ "examples": {
+ "type": "array",
+ "items": true
+ }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/format-annotation": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/format-annotation",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/format-annotation": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Format vocabulary meta-schema for annotation results",
+ "type": ["object", "boolean"],
+ "properties": {
+ "format": { "type": "string" }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/format-assertion": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/format-assertion",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/format-assertion": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Format vocabulary meta-schema for assertion results",
+ "type": ["object", "boolean"],
+ "properties": {
+ "format": { "type": "string" }
+ }
+ }`,
+ "https://json-schema.org/draft/2020-12/meta/content": `{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://json-schema.org/draft/2020-12/meta/content",
+ "$vocabulary": {
+ "https://json-schema.org/draft/2020-12/vocab/content": true
+ },
+ "$dynamicAnchor": "meta",
+
+ "title": "Content vocabulary meta-schema",
+
+ "type": ["object", "boolean"],
+ "properties": {
+ "contentEncoding": { "type": "string" },
+ "contentMediaType": { "type": "string" },
+ "contentSchema": { "$dynamicRef": "#meta" }
+ }
+ }`,
+}
+
+func clone(m map[string]position) map[string]position {
+	mm := make(map[string]position, len(m)) // pre-size: avoids rehashing while copying
+	for k, v := range m {
+		mm[k] = v
+	}
+	return mm
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
new file mode 100644
index 0000000..deaded8
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
@@ -0,0 +1,129 @@
+package jsonschema
+
+import (
+ "fmt"
+ "strings"
+)
+
+// InvalidJSONTypeError is the error type returned by ValidateInterface.
+// this tells that specified go object is not valid jsonType.
+type InvalidJSONTypeError string
+
+func (e InvalidJSONTypeError) Error() string {
+ return fmt.Sprintf("jsonschema: invalid jsonType: %s", string(e))
+}
+
+// InfiniteLoopError is returned by Compile/Validate.
+// this gives url#keywordLocation that lead to infinity loop.
+type InfiniteLoopError string
+
+func (e InfiniteLoopError) Error() string {
+ return "jsonschema: infinite loop " + string(e)
+}
+
+func infiniteLoopError(stack []schemaRef, sref schemaRef) InfiniteLoopError {
+ var path string
+ for _, ref := range stack {
+ if path == "" {
+ path += ref.schema.Location
+ } else {
+ path += "/" + ref.path
+ }
+ }
+ return InfiniteLoopError(path + "/" + sref.path)
+}
+
+// SchemaError is the error type returned by Compile.
+type SchemaError struct {
+ // SchemaURL is the url to json-schema that filed to compile.
+ // This is helpful, if your schema refers to external schemas
+ SchemaURL string
+
+ // Err is the error that occurred during compilation.
+ // It could be ValidationError, because compilation validates
+ // given schema against the json meta-schema
+ Err error
+}
+
+func (se *SchemaError) Unwrap() error {
+ return se.Err
+}
+
+func (se *SchemaError) Error() string {
+ s := fmt.Sprintf("jsonschema %s compilation failed", se.SchemaURL)
+ if se.Err != nil {
+ return fmt.Sprintf("%s: %v", s, strings.TrimPrefix(se.Err.Error(), "jsonschema: "))
+ }
+ return s
+}
+
+func (se *SchemaError) GoString() string {
+ if _, ok := se.Err.(*ValidationError); ok {
+ return fmt.Sprintf("jsonschema %s compilation failed\n%#v", se.SchemaURL, se.Err)
+ }
+ return se.Error()
+}
+
+// ValidationError is the error type returned by Validate.
+type ValidationError struct {
+ KeywordLocation string // validation path of validating keyword or schema
+ AbsoluteKeywordLocation string // absolute location of validating keyword or schema
+ InstanceLocation string // location of the json value within the instance being validated
+ Message string // describes error
+ Causes []*ValidationError // nested validation errors
+}
+
+func (ve *ValidationError) add(causes ...error) error {
+ for _, cause := range causes {
+ ve.Causes = append(ve.Causes, cause.(*ValidationError))
+ }
+ return ve
+}
+
+func (ve *ValidationError) causes(err error) error {
+ if err := err.(*ValidationError); err.Message == "" {
+ ve.Causes = err.Causes
+ } else {
+ ve.add(err)
+ }
+ return ve
+}
+
+func (ve *ValidationError) Error() string {
+ leaf := ve
+ for len(leaf.Causes) > 0 {
+ leaf = leaf.Causes[0]
+ }
+ u, _ := split(ve.AbsoluteKeywordLocation)
+ return fmt.Sprintf("jsonschema: %s does not validate with %s: %s", quote(leaf.InstanceLocation), u+"#"+leaf.KeywordLocation, leaf.Message)
+}
+
+func (ve *ValidationError) GoString() string {
+ sloc := ve.AbsoluteKeywordLocation
+ sloc = sloc[strings.IndexByte(sloc, '#')+1:]
+ msg := fmt.Sprintf("[I#%s] [S#%s] %s", ve.InstanceLocation, sloc, ve.Message)
+ for _, c := range ve.Causes {
+ for _, line := range strings.Split(c.GoString(), "\n") {
+ msg += "\n " + line
+ }
+ }
+ return msg
+}
+
+func joinPtr(ptr1, ptr2 string) string {
+ if len(ptr1) == 0 {
+ return ptr2
+ }
+ if len(ptr2) == 0 {
+ return ptr1
+ }
+ return ptr1 + "/" + ptr2
+}
+
+// quote returns single-quoted string
+func quote(s string) string {
+ s = fmt.Sprintf("%q", s)
+ s = strings.ReplaceAll(s, `\"`, `"`)
+ s = strings.ReplaceAll(s, `'`, `\'`)
+ return "'" + s[1:len(s)-1] + "'"
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
new file mode 100644
index 0000000..452ba11
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
@@ -0,0 +1,116 @@
+package jsonschema
+
// ExtCompiler compiles custom keyword(s) into ExtSchema.
type ExtCompiler interface {
	// Compile compiles the custom keywords in schema m and returns its compiled representation.
	// if the schema m does not contain the keywords defined by this extension,
	// compiled representation nil should be returned.
	Compile(ctx CompilerContext, m map[string]interface{}) (ExtSchema, error)
}

// ExtSchema is schema representation of custom keyword(s)
type ExtSchema interface {
	// Validate validates the json value v with this ExtSchema.
	// Returned error must be *ValidationError.
	Validate(ctx ValidationContext, v interface{}) error
}

// extension pairs the metaschema for a set of custom keywords with
// the ExtCompiler that knows how to compile them.
type extension struct {
	meta     *Schema
	compiler ExtCompiler
}

// RegisterExtension registers custom keyword(s) into this compiler.
//
// name is extension name, used only to avoid name collisions.
// meta captures the metaschema for the new keywords.
// This is used to validate the schema before calling ext.Compile.
func (c *Compiler) RegisterExtension(name string, meta *Schema, ext ExtCompiler) {
	c.extensions[name] = extension{meta, ext}
}

// CompilerContext ---

// CompilerContext provides additional context required in compiling for extension.
type CompilerContext struct {
	c     *Compiler   // compiler invoking the extension
	r     *resource   // resource being compiled (forwarded to compileRef)
	stack []schemaRef // enclosing schemas; used for infinite-loop detection
	res   *resource   // current sub-resource within r
}

// Compile compiles given value at ptr into *Schema. This is useful in implementing
// keyword like allOf/not/patternProperties.
//
// schPath is the relative-json-pointer to the schema to be compiled from parent schema.
//
// applicableOnSameInstance tells whether current schema and the given schema
// are applied on same instance value. this is used to detect infinite loop in schema.
func (ctx CompilerContext) Compile(schPath string, applicableOnSameInstance bool) (*Schema, error) {
	var stack []schemaRef
	if applicableOnSameInstance {
		// reuse the current stack so cycles on the same instance are detected
		stack = ctx.stack
	}
	return ctx.c.compileRef(ctx.r, stack, schPath, ctx.res, ctx.r.url+ctx.res.floc+"/"+schPath)
}

// CompileRef compiles the schema referenced by ref uri
//
// refPath is the relative-json-pointer to ref.
//
// applicableOnSameInstance tells whether current schema and the given schema
// are applied on same instance value. this is used to detect infinite loop in schema.
func (ctx CompilerContext) CompileRef(ref string, refPath string, applicableOnSameInstance bool) (*Schema, error) {
	var stack []schemaRef
	if applicableOnSameInstance {
		// reuse the current stack so cycles on the same instance are detected
		stack = ctx.stack
	}
	return ctx.c.compileRef(ctx.r, stack, refPath, ctx.res, ref)
}

// ValidationContext ---

// ValidationContext provides additional context required in validating for extension.
type ValidationContext struct {
	result          validationResult // tracks unevaluated properties/items
	validate        func(sch *Schema, schPath string, v interface{}, vpath string) error
	validateInplace func(sch *Schema, schPath string) error
	validationError func(keywordPath string, format string, a ...interface{}) *ValidationError
}

// EvaluatedProp marks given property of object as evaluated.
func (ctx ValidationContext) EvaluatedProp(prop string) {
	delete(ctx.result.unevalProps, prop)
}

// EvaluatedItem marks given index of array as evaluated.
func (ctx ValidationContext) EvaluatedItem(index int) {
	delete(ctx.result.unevalItems, index)
}

// Validate validates schema s with value v. Extension must use this method instead of
// *Schema.ValidateInterface method. This will be useful in implementing keywords like
// allOf/oneOf
//
// spath is relative-json-pointer to s
// vpath is relative-json-pointer to v.
func (ctx ValidationContext) Validate(s *Schema, spath string, v interface{}, vpath string) error {
	// empty vpath means s applies to the same instance value in-place
	if vpath == "" {
		return ctx.validateInplace(s, spath)
	}
	return ctx.validate(s, spath, v, vpath)
}

// Error used to construct validation error by extensions.
//
// keywordPath is relative-json-pointer to keyword.
func (ctx ValidationContext) Error(keywordPath string, format string, a ...interface{}) *ValidationError {
	return ctx.validationError(keywordPath, format, a...)
}

// Group is used by extensions to group multiple errors as causes to parent error.
// This is useful in implementing keywords like allOf where each schema specified
// in allOf can result a validationError.
//
// NOTE(review): the receiver value is unused; Group is a method only
// for namespacing under ValidationError.
func (ValidationError) Group(parent *ValidationError, causes ...error) error {
	return parent.add(causes...)
}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
new file mode 100644
index 0000000..0568607
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
@@ -0,0 +1,567 @@
+package jsonschema
+
+import (
+ "errors"
+ "net"
+ "net/mail"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+)
+
// Formats is a registry of functions, which know how to validate
// a specific format.
//
// New Formats can be registered by adding to this map. Key is format name,
// value is function that knows how to validate that format.
//
// Each validator receives the raw json value; validators in this file
// return true for non-string values, since format applies to strings only.
var Formats = map[string]func(interface{}) bool{
	"date-time":             isDateTime,
	"date":                  isDate,
	"time":                  isTime,
	"duration":              isDuration,
	"period":                isPeriod,
	"hostname":              isHostname,
	"email":                 isEmail,
	"ip-address":            isIPV4,
	"ipv4":                  isIPV4,
	"ipv6":                  isIPV6,
	"uri":                   isURI,
	"iri":                   isURI,
	"uri-reference":         isURIReference,
	"uriref":                isURIReference,
	"iri-reference":         isURIReference,
	"uri-template":          isURITemplate,
	"regex":                 isRegex,
	"json-pointer":          isJSONPointer,
	"relative-json-pointer": isRelativeJSONPointer,
	"uuid":                  isUUID,
}
+
// isDateTime tells whether given string is a valid date representation
// as defined by RFC 3339, section 5.6.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
func isDateTime(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	if len(s) < 20 { // yyyy-mm-ddThh:mm:ssZ
		return false
	}
	// date and time must be joined by 'T' (case-insensitive);
	// a space separator is not accepted
	if s[10] != 'T' && s[10] != 't' {
		return false
	}
	return isDate(s[:10]) && isTime(s[11:])
}
+
// isDate tells whether given string is a valid full-date production
// ("yyyy-mm-dd" naming a real calendar date) as defined by RFC 3339,
// section 5.6.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
func isDate(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	if _, err := time.Parse("2006-01-02", str); err != nil {
		return false
	}
	return true
}
+
// isTime tells whether given string is a valid full-time production
// as defined by RFC 3339, section 5.6.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
func isTime(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}

	// golang time package does not support leap seconds.
	// so we are parsing it manually here.

	// hh:mm:ss followed by at least one more char (secfrac/offset/Z)
	// 01234567
	if len(str) < 9 || str[2] != ':' || str[5] != ':' {
		return false
	}
	// isInRange parses str as an integer and checks it lies in [min, max]
	isInRange := func(str string, min, max int) (int, bool) {
		n, err := strconv.Atoi(str)
		if err != nil {
			return 0, false
		}
		if n < min || n > max {
			return 0, false
		}
		return n, true
	}
	var h, m, s int
	if h, ok = isInRange(str[0:2], 0, 23); !ok {
		return false
	}
	if m, ok = isInRange(str[3:5], 0, 59); !ok {
		return false
	}
	// 60 allowed here for a possible leap second; verified below
	if s, ok = isInRange(str[6:8], 0, 60); !ok {
		return false
	}
	str = str[8:]

	// parse secfrac if present
	if str[0] == '.' {
		// the dot must be followed by at least one digit
		str = str[1:]
		var numDigits int
		for str != "" {
			if str[0] < '0' || str[0] > '9' {
				break
			}
			numDigits++
			str = str[1:]
		}
		if numDigits == 0 {
			return false
		}
	}

	// an offset ("Z" or +hh:mm/-hh:mm) is mandatory
	if len(str) == 0 {
		return false
	}

	if str[0] == 'z' || str[0] == 'Z' {
		if len(str) != 1 {
			return false
		}
	} else {
		// time-numoffset
		// +hh:mm
		// 012345
		if len(str) != 6 || str[3] != ':' {
			return false
		}

		// sign is inverted: we convert the local time back to UTC below
		var sign int
		if str[0] == '+' {
			sign = -1
		} else if str[0] == '-' {
			sign = +1
		} else {
			return false
		}

		var zh, zm int
		if zh, ok = isInRange(str[1:3], 0, 23); !ok {
			return false
		}
		if zm, ok = isInRange(str[4:6], 0, 59); !ok {
			return false
		}

		// apply timezone offset (normalize to UTC, wrapping past midnight)
		hm := (h*60 + m) + sign*(zh*60+zm)
		if hm < 0 {
			hm += 24 * 60
		}
		h, m = hm/60, hm%60
	}

	// check leapsecond: only valid at 23:59:60 UTC
	if s == 60 { // leap second
		if h != 23 || m != 59 {
			return false
		}
	}

	return true
}
+
// isDuration tells whether given string is a valid duration format
// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
func isDuration(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	if len(s) == 0 || s[0] != 'P' {
		return false
	}
	s = s[1:]
	// parseUnits consumes "<digits><unit-letter>" pairs from the captured s
	// until a 'T' or end of input, returning the unit letters seen in order.
	parseUnits := func() (units string, ok bool) {
		for len(s) > 0 && s[0] != 'T' {
			digits := false
			for {
				if len(s) == 0 {
					break
				}
				if s[0] < '0' || s[0] > '9' {
					break
				}
				digits = true
				s = s[1:]
			}
			if !digits || len(s) == 0 {
				return units, false
			}
			units += s[:1]
			s = s[1:]
		}
		return units, true
	}
	units, ok := parseUnits()
	if !ok {
		return false
	}
	if units == "W" {
		return len(s) == 0 // P_W: weeks cannot combine with other units
	}
	if len(units) > 0 {
		// date units must form a contiguous, ordered run of "YMD"
		if strings.Index("YMD", units) == -1 {
			return false
		}
		if len(s) == 0 {
			return true // "P" dur-date
		}
	}
	// time part: 'T' followed by a contiguous, ordered run of "HMS"
	if len(s) == 0 || s[0] != 'T' {
		return false
	}
	s = s[1:]
	units, ok = parseUnits()
	return ok && len(s) == 0 && len(units) > 0 && strings.Index("HMS", units) != -1
}
+
+// isPeriod tells whether given string is a valid period format
+// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
+//
+// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
+func isPeriod(v interface{}) bool {
+ s, ok := v.(string)
+ if !ok {
+ return true
+ }
+ slash := strings.IndexByte(s, '/')
+ if slash == -1 {
+ return false
+ }
+ start, end := s[:slash], s[slash+1:]
+ if isDateTime(start) {
+ return isDateTime(end) || isDuration(end)
+ }
+ return isDuration(start) && isDateTime(end)
+}
+
// isHostname tells whether given string is a valid representation
// for an Internet host name, as defined by RFC 1034 section 3.1 and
// RFC 1123 section 2.1.
//
// See https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names, for details.
func isHostname(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	// entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
	s = strings.TrimSuffix(s, ".")
	if len(s) > 253 {
		return false
	}

	// Hostnames are composed of series of labels concatenated with dots, as are all domain names
	for _, label := range strings.Split(s, ".") {
		// Each label must be from 1 to 63 characters long
		if labelLen := len(label); labelLen < 1 || labelLen > 63 {
			return false
		}

		// labels must not start with a hyphen
		// RFC 1123 section 2.1: restriction on the first character
		// is relaxed to allow either a letter or a digit
		//
		// BUG FIX: previously this checked s[0] (the first byte of the
		// whole hostname) on every iteration, so an inner label starting
		// with '-' (e.g. "foo.-bar.com") was wrongly accepted.
		if label[0] == '-' {
			return false
		}

		// must not end with a hyphen
		if label[len(label)-1] == '-' {
			return false
		}

		// labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
		// the digits '0' through '9', and the hyphen ('-')
		for _, c := range label {
			if valid := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-'); !valid {
				return false
			}
		}
	}

	return true
}
+
// isEmail tells whether given string is a valid Internet email address
// as defined by RFC 5322, section 3.4.1.
//
// See https://en.wikipedia.org/wiki/Email_address, for details.
func isEmail(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	// entire email address to be no more than 254 characters long
	if len(s) > 254 {
		return false
	}

	// email address is generally recognized as having two parts joined with an at-sign;
	// the last '@' is the separator since the local part may contain quoted '@'s
	at := strings.LastIndexByte(s, '@')
	if at == -1 {
		return false
	}
	local := s[0:at]
	domain := s[at+1:]

	// local part may be up to 64 characters long
	if len(local) > 64 {
		return false
	}

	// domain if enclosed in brackets, must match an IP address
	if len(domain) >= 2 && domain[0] == '[' && domain[len(domain)-1] == ']' {
		ip := domain[1 : len(domain)-1]
		if strings.HasPrefix(ip, "IPv6:") {
			return isIPV6(strings.TrimPrefix(ip, "IPv6:"))
		}
		return isIPV4(ip)
	}

	// domain must match the requirements for a hostname
	if !isHostname(domain) {
		return false
	}

	// finally defer to the stdlib RFC 5322 address parser
	_, err := mail.ParseAddress(s)
	return err == nil
}
+
// isIPV4 tells whether given string is a valid representation of an
// IPv4 address in "dotted-quad" form (RFC 2673, section 3.2).
func isIPV4(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	octets := strings.Split(str, ".")
	if len(octets) != 4 {
		return false
	}
	for _, octet := range octets {
		n, err := strconv.Atoi(octet)
		if err != nil || n < 0 || n > 255 {
			return false
		}
		// leading zeroes are rejected (octal-looking), a bare "0" is fine
		if octet[0] == '0' && n != 0 {
			return false
		}
	}
	return true
}
+
// isIPV6 tells whether given string is a valid representation of an
// IPv6 address as defined in RFC 2373, section 2.2.
func isIPV6(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	// require a colon, otherwise net.ParseIP would accept plain IPv4 too
	if strings.IndexByte(str, ':') < 0 {
		return false
	}
	return net.ParseIP(str) != nil
}
+
// isURI tells whether given string is valid URI, according to RFC 3986.
func isURI(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	u, err := urlParse(s)
	// a URI (as opposed to a uri-reference) must be absolute
	return err == nil && u.IsAbs()
}

// urlParse wraps url.Parse with stricter handling of IPv6 hosts:
// an address containing ':' must be bracketed and must be a valid IPv6
// address (net/url alone accepts more than RFC 3986 allows here).
func urlParse(s string) (*url.URL, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, err
	}

	// if hostname is ipv6, validate it
	hostname := u.Hostname()
	if strings.IndexByte(hostname, ':') != -1 {
		if strings.IndexByte(u.Host, '[') == -1 || strings.IndexByte(u.Host, ']') == -1 {
			return nil, errors.New("ipv6 address is not enclosed in brackets")
		}
		if !isIPV6(hostname) {
			return nil, errors.New("invalid ipv6 address")
		}
	}
	return u, nil
}
+
// isURIReference tells whether given string is a valid URI Reference
// (either a URI or a relative-reference), according to RFC 3986.
func isURIReference(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	_, err := urlParse(s)
	// backslashes are never valid in a URI reference, though url.Parse
	// tolerates them
	return err == nil && !strings.Contains(s, `\`)
}
+
// isURITemplate tells whether given string is a valid URI Template
// according to RFC6570.
//
// Current implementation does minimal validation: it only checks that
// '{' and '}' are balanced, non-nested, and closed within each path segment.
func isURITemplate(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	u, err := urlParse(s)
	if err != nil {
		return false
	}
	// NOTE(review): this iterates u.RawPath, which net/url leaves empty
	// unless the path needed escaping; templates in a plain path are then
	// not brace-checked — confirm this is intended.
	for _, item := range strings.Split(u.RawPath, "/") {
		depth := 0
		for _, ch := range item {
			switch ch {
			case '{':
				depth++
				if depth != 1 {
					return false // nested '{'
				}
			case '}':
				depth--
				if depth != 0 {
					return false // unmatched '}'
				}
			}
		}
		if depth != 0 {
			return false // unclosed '{' in this segment
		}
	}
	return true
}
+
// isRegex tells whether given string is a valid regular expression,
// according to the ECMA 262 regular expression dialect.
//
// The implementation uses go-lang regexp package.
func isRegex(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	if _, err := regexp.Compile(str); err != nil {
		return false
	}
	return true
}
+
// isJSONPointer tells whether given string is a valid JSON Pointer:
// empty, or "/"-prefixed tokens where '~' only appears as '~0' or '~1'.
//
// Note: It returns false for JSON Pointer URI fragments.
func isJSONPointer(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	if str != "" && !strings.HasPrefix(str, "/") {
		return false
	}
	for _, token := range strings.Split(str, "/") {
		for i := 0; i < len(token); i++ {
			if token[i] != '~' {
				continue
			}
			// '~' must be followed by '0' or '1'
			if i == len(token)-1 || (token[i+1] != '0' && token[i+1] != '1') {
				return false
			}
		}
	}
	return true
}
+
// isRelativeJSONPointer tells whether given string is a valid Relative JSON Pointer.
//
// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
func isRelativeJSONPointer(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	if s == "" {
		return false
	}
	// leading non-negative integer: a single "0", or digits with a
	// non-zero first digit (no leading zeroes)
	if s[0] == '0' {
		s = s[1:]
	} else if s[0] >= '0' && s[0] <= '9' {
		for s != "" && s[0] >= '0' && s[0] <= '9' {
			s = s[1:]
		}
	} else {
		return false
	}
	// remainder is either "#" (index/member name) or a json-pointer
	return s == "#" || isJSONPointer(s)
}
+
// isUUID tells whether given string is a valid uuid format
// (8-4-4-4-12 hexadecimal digits separated by '-') as specified in RFC4122.
//
// see https://datatracker.ietf.org/doc/html/rfc4122#page-4, for details
func isUUID(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	isHexDigit := func(b byte) bool {
		return (b >= '0' && b <= '9') || (b >= 'a' && b <= 'f') || (b >= 'A' && b <= 'F')
	}
	for gi, size := range []int{8, 4, 4, 4, 12} {
		// groups after the first are preceded by a '-'
		if gi > 0 {
			if len(str) == 0 || str[0] != '-' {
				return false
			}
			str = str[1:]
		}
		if len(str) < size {
			return false
		}
		for i := 0; i < size; i++ {
			if !isHexDigit(str[i]) {
				return false
			}
		}
		str = str[size:]
	}
	// nothing may follow the last group
	return len(str) == 0
}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
new file mode 100644
index 0000000..c94195c
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
@@ -0,0 +1,60 @@
+package jsonschema
+
+import (
+ "fmt"
+ "io"
+ "net/url"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+)
+
// loadFileURL opens the local file named by a file:// URL.
// On windows the leading '/' of the URL path is dropped and slashes are
// converted, so "file:///C:/x" maps to `C:\x`.
func loadFileURL(s string) (io.ReadCloser, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, err
	}
	f := u.Path
	if runtime.GOOS == "windows" {
		f = strings.TrimPrefix(f, "/")
		f = filepath.FromSlash(f)
	}
	return os.Open(f)
}

// Loaders is a registry of functions, which know how to load
// absolute url of specific schema.
//
// New loaders can be registered by adding to this map. Key is schema,
// value is function that knows how to load url of that schema
var Loaders = map[string]func(url string) (io.ReadCloser, error){
	"file": loadFileURL,
}
+
// LoaderNotFoundError is the error type returned by Load function.
// It tells that no Loader is registered for that URL Scheme.
type LoaderNotFoundError string

// Error implements the error interface.
func (e LoaderNotFoundError) Error() string {
	return "jsonschema: no Loader found for " + string(e)
}
+
+// LoadURL loads document at given absolute URL. The default implementation
+// uses Loaders registry to lookup by schema and uses that loader.
+//
+// Users can change this variable, if they would like to take complete
+// responsibility of loading given URL. Used by Compiler if its LoadURL
+// field is nil.
+var LoadURL = func(s string) (io.ReadCloser, error) {
+ u, err := url.Parse(s)
+ if err != nil {
+ return nil, err
+ }
+ loader, ok := Loaders[u.Scheme]
+ if !ok {
+ return nil, LoaderNotFoundError(s)
+
+ }
+ return loader(s)
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
new file mode 100644
index 0000000..d65ae2a
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
@@ -0,0 +1,77 @@
+package jsonschema
+
// Flag is output format with simple boolean property valid.
type Flag struct {
	Valid bool `json:"valid"`
}

// FlagOutput returns output in flag format.
// Valid is always false: a *ValidationError only exists when
// validation has failed.
func (ve *ValidationError) FlagOutput() Flag {
	return Flag{}
}
+
// Basic ---

// Basic is output format with flat list of output units.
type Basic struct {
	Valid  bool         `json:"valid"`
	Errors []BasicError `json:"errors"`
}

// BasicError is output unit in basic format.
type BasicError struct {
	KeywordLocation         string `json:"keywordLocation"`         // validation path of the keyword
	AbsoluteKeywordLocation string `json:"absoluteKeywordLocation"` // absolute schema location
	InstanceLocation        string `json:"instanceLocation"`        // location within the instance
	Error                   string `json:"error"`                   // error message
}
+
+// BasicOutput returns output in basic format
+func (ve *ValidationError) BasicOutput() Basic {
+ var errors []BasicError
+ var flatten func(*ValidationError)
+ flatten = func(ve *ValidationError) {
+ errors = append(errors, BasicError{
+ KeywordLocation: ve.KeywordLocation,
+ AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
+ InstanceLocation: ve.InstanceLocation,
+ Error: ve.Message,
+ })
+ for _, cause := range ve.Causes {
+ flatten(cause)
+ }
+ }
+ flatten(ve)
+ return Basic{Errors: errors}
+}
+
// Detailed ---

// Detailed is output format based on structure of schema.
type Detailed struct {
	Valid                   bool       `json:"valid"`
	KeywordLocation         string     `json:"keywordLocation"`
	AbsoluteKeywordLocation string     `json:"absoluteKeywordLocation"`
	InstanceLocation        string     `json:"instanceLocation"`
	Error                   string     `json:"error,omitempty"`  // set only on leaf units
	Errors                  []Detailed `json:"errors,omitempty"` // nested units, mirrors Causes
}
+
+// DetailedOutput returns output in detailed format
+func (ve *ValidationError) DetailedOutput() Detailed {
+ var errors []Detailed
+ for _, cause := range ve.Causes {
+ errors = append(errors, cause.DetailedOutput())
+ }
+ var message = ve.Message
+ if len(ve.Causes) > 0 {
+ message = ""
+ }
+ return Detailed{
+ KeywordLocation: ve.KeywordLocation,
+ AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
+ InstanceLocation: ve.InstanceLocation,
+ Error: message,
+ Errors: errors,
+ }
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
new file mode 100644
index 0000000..18349da
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
@@ -0,0 +1,280 @@
+package jsonschema
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/url"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+)
+
// resource is a json document (or a sub-document) addressed by url+floc.
type resource struct {
	url          string // base url of resource. can be empty
	floc         string // fragment with json-pointer from root resource
	doc          interface{}
	draft        *Draft
	subresources map[string]*resource // key is floc. only applicable for root resource
	schema       *Schema
}

// String returns the canonical "url#/json/pointer" form of the resource.
func (r *resource) String() string {
	return r.url + r.floc
}
+
+func newResource(url string, r io.Reader) (*resource, error) {
+ if strings.IndexByte(url, '#') != -1 {
+ panic(fmt.Sprintf("BUG: newResource(%q)", url))
+ }
+ doc, err := unmarshal(r)
+ if err != nil {
+ return nil, fmt.Errorf("jsonschema: invalid json %s: %v", url, err)
+ }
+ url, err = toAbs(url)
+ if err != nil {
+ return nil, err
+ }
+ return &resource{
+ url: url,
+ floc: "#",
+ doc: doc,
+ }, nil
+}
+
// fillSubschemas fills subschemas in res into r.subresources
func (r *resource) fillSubschemas(c *Compiler, res *resource) error {
	// validate res against its draft's metaschema before walking it
	if err := c.validateSchema(r, res.doc, res.floc[1:]); err != nil {
		return err
	}

	if r.subresources == nil {
		r.subresources = make(map[string]*resource)
	}
	if err := r.draft.listSubschemas(res, r.baseURL(res.floc), r.subresources); err != nil {
		return err
	}

	// ensure subresource.url uniqueness: two different locations must
	// not claim the same canonical uri
	url2floc := make(map[string]string)
	for _, sr := range r.subresources {
		if sr.url != "" {
			if floc, ok := url2floc[sr.url]; ok {
				return fmt.Errorf("jsonschema: %q and %q in %s have same canonical-uri", floc[1:], sr.floc[1:], r.url)
			}
			url2floc[sr.url] = sr.floc
		}
	}

	return nil
}
+
+// listResources lists all subresources in res
+func (r *resource) listResources(res *resource) []*resource {
+ var result []*resource
+ prefix := res.floc + "/"
+ for _, sr := range r.subresources {
+ if strings.HasPrefix(sr.floc, prefix) {
+ result = append(result, sr)
+ }
+ }
+ return result
+}
+
+func (r *resource) findResource(url string) *resource {
+ if r.url == url {
+ return r
+ }
+ for _, res := range r.subresources {
+ if res.url == url {
+ return res
+ }
+ }
+ return nil
+}
+
// resolveFragment resolves fragment f with sr as base: first as an
// anchor (plain name), then as a json-pointer, materializing a new
// subresource for non-standard pointer locations. Returns nil (with
// nil error) when f does not resolve.
func (r *resource) resolveFragment(c *Compiler, sr *resource, f string) (*resource, error) {
	if f == "#" || f == "#/" {
		return sr, nil
	}

	// resolve by anchor
	if !strings.HasPrefix(f, "#/") {
		// check in given resource
		for _, anchor := range r.draft.anchors(sr.doc) {
			if anchor == f[1:] {
				return sr, nil
			}
		}

		// check in subresources that has same base url
		prefix := sr.floc + "/"
		for _, res := range r.subresources {
			if strings.HasPrefix(res.floc, prefix) && r.baseURL(res.floc) == sr.url {
				for _, anchor := range r.draft.anchors(res.doc) {
					if anchor == f[1:] {
						return res, nil
					}
				}
			}
		}
		return nil, nil
	}

	// resolve by ptr
	floc := sr.floc + f[1:]
	if res, ok := r.subresources[floc]; ok {
		return res, nil
	}

	// non-standrad location: walk the raw document token by token,
	// undoing json-pointer escapes (~1 then ~0) and percent-encoding
	doc := r.doc
	for _, item := range strings.Split(floc[2:], "/") {
		item = strings.Replace(item, "~1", "/", -1)
		item = strings.Replace(item, "~0", "~", -1)
		item, err := url.PathUnescape(item)
		if err != nil {
			return nil, err
		}
		switch d := doc.(type) {
		case map[string]interface{}:
			if _, ok := d[item]; !ok {
				return nil, nil
			}
			doc = d[item]
		case []interface{}:
			index, err := strconv.Atoi(item)
			if err != nil {
				return nil, err
			}
			if index < 0 || index >= len(d) {
				return nil, nil
			}
			doc = d[index]
		default:
			return nil, nil
		}
	}

	// register the found document as a new subresource so later lookups hit the map
	id, err := r.draft.resolveID(r.baseURL(floc), doc)
	if err != nil {
		return nil, err
	}
	res := &resource{url: id, floc: floc, doc: doc}
	r.subresources[floc] = res
	if err := r.fillSubschemas(c, res); err != nil {
		return nil, err
	}
	return res, nil
}
+
+func (r *resource) baseURL(floc string) string {
+ for {
+ if sr, ok := r.subresources[floc]; ok {
+ if sr.url != "" {
+ return sr.url
+ }
+ }
+ slash := strings.LastIndexByte(floc, '/')
+ if slash == -1 {
+ break
+ }
+ floc = floc[:slash]
+ }
+ return r.url
+}
+
+// url helpers ---
+
// toAbs converts s to an absolute url: already-absolute urls are
// returned as-is, anything else is treated as a filesystem path and
// turned into a file:// url (with windows drive-letter handling).
func toAbs(s string) (string, error) {
	// if windows absolute file path, convert to file url
	// because: net/url parses driver name as scheme
	if runtime.GOOS == "windows" && len(s) >= 3 && s[1:3] == `:\` {
		s = "file:///" + filepath.ToSlash(s)
	}

	u, err := url.Parse(s)
	if err != nil {
		return "", err
	}
	if u.IsAbs() {
		return s, nil
	}

	// s is filepath
	if s, err = filepath.Abs(s); err != nil {
		return "", err
	}
	if runtime.GOOS == "windows" {
		s = "file:///" + filepath.ToSlash(s)
	} else {
		s = "file://" + s
	}
	u, err = url.Parse(s) // to fix spaces in filepath
	return u.String(), err
}
+
+func resolveURL(base, ref string) (string, error) {
+ if ref == "" {
+ return base, nil
+ }
+ if strings.HasPrefix(ref, "urn:") {
+ return ref, nil
+ }
+
+ refURL, err := url.Parse(ref)
+ if err != nil {
+ return "", err
+ }
+ if refURL.IsAbs() {
+ return ref, nil
+ }
+
+ if strings.HasPrefix(base, "urn:") {
+ base, _ = split(base)
+ return base + ref, nil
+ }
+
+ baseURL, err := url.Parse(base)
+ if err != nil {
+ return "", err
+ }
+ return baseURL.ResolveReference(refURL).String(), nil
+}
+
// split separates uri into its base and fragment parts. A missing
// fragment yields "#", and the empty pointer "#/" is normalized to "#".
func split(uri string) (string, string) {
	i := strings.IndexByte(uri, '#')
	if i < 0 {
		return uri, "#"
	}
	base, frag := uri[:i], uri[i:]
	if frag == "#/" {
		frag = "#"
	}
	return base, frag
}
+
// url returns the base-url part of the schema's absolute location.
func (s *Schema) url() string {
	u, _ := split(s.Location)
	return u
}

// loc returns the json-pointer part of the schema's absolute location,
// without the leading '#'.
func (s *Schema) loc() string {
	_, f := split(s.Location)
	return f[1:]
}
+
+func unmarshal(r io.Reader) (interface{}, error) {
+ decoder := json.NewDecoder(r)
+ decoder.UseNumber()
+ var doc interface{}
+ if err := decoder.Decode(&doc); err != nil {
+ return nil, err
+ }
+ if t, _ := decoder.Token(); t != nil {
+ return nil, fmt.Errorf("invalid character %v after top-level value", t)
+ }
+ return doc, nil
+}
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go b/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
new file mode 100644
index 0000000..0c8d8a3
--- /dev/null
+++ b/vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
@@ -0,0 +1,826 @@
+package jsonschema
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "math/big"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+)
+
+// A Schema represents compiled version of json-schema.
+type Schema struct {
+ Location string // absolute location
+
+ meta *Schema
+ vocab []string
+ dynamicAnchors []*Schema
+
+ // type agnostic validations
+ Format string
+ format func(interface{}) bool
+ Always *bool // always pass/fail. used when booleans are used as schemas in draft-07.
+ Ref *Schema
+ RecursiveAnchor bool
+ RecursiveRef *Schema
+ DynamicAnchor string
+ DynamicRef *Schema
+ Types []string // allowed types.
+ Constant []interface{} // first element in slice is constant value. note: slice is used to capture nil constant.
+ Enum []interface{} // allowed values.
+ enumError string // error message for enum fail. captured here to avoid constructing error message every time.
+ Not *Schema
+ AllOf []*Schema
+ AnyOf []*Schema
+ OneOf []*Schema
+ If *Schema
+ Then *Schema // nil, when If is nil.
+ Else *Schema // nil, when If is nil.
+
+ // object validations
+ MinProperties int // -1 if not specified.
+ MaxProperties int // -1 if not specified.
+ Required []string // list of required properties.
+ Properties map[string]*Schema
+ PropertyNames *Schema
+ RegexProperties bool // property names must be valid regex. used only in draft4 as workaround in metaschema.
+ PatternProperties map[*regexp.Regexp]*Schema
+ AdditionalProperties interface{} // nil or bool or *Schema.
+ Dependencies map[string]interface{} // map value is *Schema or []string.
+ DependentRequired map[string][]string
+ DependentSchemas map[string]*Schema
+ UnevaluatedProperties *Schema
+
+ // array validations
+ MinItems int // -1 if not specified.
+ MaxItems int // -1 if not specified.
+ UniqueItems bool
+ Items interface{} // nil or *Schema or []*Schema
+ AdditionalItems interface{} // nil or bool or *Schema.
+ PrefixItems []*Schema
+ Items2020 *Schema // items keyword reintroduced in draft 2020-12
+ Contains *Schema
+ ContainsEval bool // whether any item in an array that passes validation of the contains schema is considered "evaluated"
+ MinContains int // 1 if not specified
+ MaxContains int // -1 if not specified
+ UnevaluatedItems *Schema
+
+ // string validations
+ MinLength int // -1 if not specified.
+ MaxLength int // -1 if not specified.
+ Pattern *regexp.Regexp
+ ContentEncoding string
+ decoder func(string) ([]byte, error)
+ ContentMediaType string
+ mediaType func([]byte) error
+ ContentSchema *Schema
+
+ // number validators
+ Minimum *big.Rat
+ ExclusiveMinimum *big.Rat
+ Maximum *big.Rat
+ ExclusiveMaximum *big.Rat
+ MultipleOf *big.Rat
+
+ // annotations. captured only when Compiler.ExtractAnnotations is true.
+ Title string
+ Description string
+ Default interface{}
+ Comment string
+ ReadOnly bool
+ WriteOnly bool
+ Examples []interface{}
+ Deprecated bool
+
+ // user defined extensions
+ Extensions map[string]ExtSchema
+}
+
+func (s *Schema) String() string {
+ return s.Location
+}
+
+func newSchema(url, floc string, doc interface{}) *Schema {
+ // fill with default values
+ s := &Schema{
+ Location: url + floc,
+ MinProperties: -1,
+ MaxProperties: -1,
+ MinItems: -1,
+ MaxItems: -1,
+ MinContains: 1,
+ MaxContains: -1,
+ MinLength: -1,
+ MaxLength: -1,
+ }
+
+ if doc, ok := doc.(map[string]interface{}); ok {
+ if ra, ok := doc["$recursiveAnchor"]; ok {
+ if ra, ok := ra.(bool); ok {
+ s.RecursiveAnchor = ra
+ }
+ }
+ if da, ok := doc["$dynamicAnchor"]; ok {
+ if da, ok := da.(string); ok {
+ s.DynamicAnchor = da
+ }
+ }
+ }
+ return s
+}
+
+func (s *Schema) hasVocab(name string) bool {
+ if s == nil { // during bootstrap
+ return true
+ }
+ if name == "core" {
+ return true
+ }
+ for _, url := range s.vocab {
+ if url == "https://json-schema.org/draft/2019-09/vocab/"+name {
+ return true
+ }
+ if url == "https://json-schema.org/draft/2020-12/vocab/"+name {
+ return true
+ }
+ }
+ return false
+}
+
+// Validate validates given doc, against the json-schema s.
+//
+// the v must be the raw json value. for number precision
+// unmarshal with json.UseNumber().
+//
+// returns *ValidationError if v does not confirm with schema s.
+// returns InfiniteLoopError if it detects loop during validation.
+// returns InvalidJSONTypeError if it detects any non json value in v.
+func (s *Schema) Validate(v interface{}) (err error) {
+ return s.validateValue(v, "")
+}
+
+func (s *Schema) validateValue(v interface{}, vloc string) (err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ switch r := r.(type) {
+ case InfiniteLoopError, InvalidJSONTypeError:
+ err = r.(error)
+ default:
+ panic(r)
+ }
+ }
+ }()
+ if _, err := s.validate(nil, 0, "", v, vloc); err != nil {
+ ve := ValidationError{
+ KeywordLocation: "",
+ AbsoluteKeywordLocation: s.Location,
+ InstanceLocation: vloc,
+ Message: fmt.Sprintf("doesn't validate with %s", s.Location),
+ }
+ return ve.causes(err)
+ }
+ return nil
+}
+
+// validate validates given value v with this schema.
+func (s *Schema) validate(scope []schemaRef, vscope int, spath string, v interface{}, vloc string) (result validationResult, err error) {
+ validationError := func(keywordPath string, format string, a ...interface{}) *ValidationError {
+ return &ValidationError{
+ KeywordLocation: keywordLocation(scope, keywordPath),
+ AbsoluteKeywordLocation: joinPtr(s.Location, keywordPath),
+ InstanceLocation: vloc,
+ Message: fmt.Sprintf(format, a...),
+ }
+ }
+
+ sref := schemaRef{spath, s, false}
+ if err := checkLoop(scope[len(scope)-vscope:], sref); err != nil {
+ panic(err)
+ }
+ scope = append(scope, sref)
+ vscope++
+
+ // populate result
+ switch v := v.(type) {
+ case map[string]interface{}:
+ result.unevalProps = make(map[string]struct{})
+ for pname := range v {
+ result.unevalProps[pname] = struct{}{}
+ }
+ case []interface{}:
+ result.unevalItems = make(map[int]struct{})
+ for i := range v {
+ result.unevalItems[i] = struct{}{}
+ }
+ }
+
+ validate := func(sch *Schema, schPath string, v interface{}, vpath string) error {
+ vloc := vloc
+ if vpath != "" {
+ vloc += "/" + vpath
+ }
+ _, err := sch.validate(scope, 0, schPath, v, vloc)
+ return err
+ }
+
+ validateInplace := func(sch *Schema, schPath string) error {
+ vr, err := sch.validate(scope, vscope, schPath, v, vloc)
+ if err == nil {
+ // update result
+ for pname := range result.unevalProps {
+ if _, ok := vr.unevalProps[pname]; !ok {
+ delete(result.unevalProps, pname)
+ }
+ }
+ for i := range result.unevalItems {
+ if _, ok := vr.unevalItems[i]; !ok {
+ delete(result.unevalItems, i)
+ }
+ }
+ }
+ return err
+ }
+
+ if s.Always != nil {
+ if !*s.Always {
+ return result, validationError("", "not allowed")
+ }
+ return result, nil
+ }
+
+ if len(s.Types) > 0 {
+ vType := jsonType(v)
+ matched := false
+ for _, t := range s.Types {
+ if vType == t {
+ matched = true
+ break
+ } else if t == "integer" && vType == "number" {
+ num, _ := new(big.Rat).SetString(fmt.Sprint(v))
+ if num.IsInt() {
+ matched = true
+ break
+ }
+ }
+ }
+ if !matched {
+ return result, validationError("type", "expected %s, but got %s", strings.Join(s.Types, " or "), vType)
+ }
+ }
+
+ var errors []error
+
+ if len(s.Constant) > 0 {
+ if !equals(v, s.Constant[0]) {
+ switch jsonType(s.Constant[0]) {
+ case "object", "array":
+ errors = append(errors, validationError("const", "const failed"))
+ default:
+ errors = append(errors, validationError("const", "value must be %#v", s.Constant[0]))
+ }
+ }
+ }
+
+ if len(s.Enum) > 0 {
+ matched := false
+ for _, item := range s.Enum {
+ if equals(v, item) {
+ matched = true
+ break
+ }
+ }
+ if !matched {
+ errors = append(errors, validationError("enum", s.enumError))
+ }
+ }
+
+ if s.format != nil && !s.format(v) {
+ var val = v
+ if v, ok := v.(string); ok {
+ val = quote(v)
+ }
+ errors = append(errors, validationError("format", "%v is not valid %s", val, quote(s.Format)))
+ }
+
+ switch v := v.(type) {
+ case map[string]interface{}:
+ if s.MinProperties != -1 && len(v) < s.MinProperties {
+ errors = append(errors, validationError("minProperties", "minimum %d properties allowed, but found %d properties", s.MinProperties, len(v)))
+ }
+ if s.MaxProperties != -1 && len(v) > s.MaxProperties {
+ errors = append(errors, validationError("maxProperties", "maximum %d properties allowed, but found %d properties", s.MaxProperties, len(v)))
+ }
+ if len(s.Required) > 0 {
+ var missing []string
+ for _, pname := range s.Required {
+ if _, ok := v[pname]; !ok {
+ missing = append(missing, quote(pname))
+ }
+ }
+ if len(missing) > 0 {
+ errors = append(errors, validationError("required", "missing properties: %s", strings.Join(missing, ", ")))
+ }
+ }
+
+ for pname, sch := range s.Properties {
+ if pvalue, ok := v[pname]; ok {
+ delete(result.unevalProps, pname)
+ if err := validate(sch, "properties/"+escape(pname), pvalue, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+
+ if s.PropertyNames != nil {
+ for pname := range v {
+ if err := validate(s.PropertyNames, "propertyNames", pname, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+
+ if s.RegexProperties {
+ for pname := range v {
+ if !isRegex(pname) {
+ errors = append(errors, validationError("", "patternProperty %s is not valid regex", quote(pname)))
+ }
+ }
+ }
+ for pattern, sch := range s.PatternProperties {
+ for pname, pvalue := range v {
+ if pattern.MatchString(pname) {
+ delete(result.unevalProps, pname)
+ if err := validate(sch, "patternProperties/"+escape(pattern.String()), pvalue, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+ }
+ if s.AdditionalProperties != nil {
+ if allowed, ok := s.AdditionalProperties.(bool); ok {
+ if !allowed && len(result.unevalProps) > 0 {
+ errors = append(errors, validationError("additionalProperties", "additionalProperties %s not allowed", result.unevalPnames()))
+ }
+ } else {
+ schema := s.AdditionalProperties.(*Schema)
+ for pname := range result.unevalProps {
+ if pvalue, ok := v[pname]; ok {
+ if err := validate(schema, "additionalProperties", pvalue, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+ }
+ result.unevalProps = nil
+ }
+ for dname, dvalue := range s.Dependencies {
+ if _, ok := v[dname]; ok {
+ switch dvalue := dvalue.(type) {
+ case *Schema:
+ if err := validateInplace(dvalue, "dependencies/"+escape(dname)); err != nil {
+ errors = append(errors, err)
+ }
+ case []string:
+ for i, pname := range dvalue {
+ if _, ok := v[pname]; !ok {
+ errors = append(errors, validationError("dependencies/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
+ }
+ }
+ }
+ }
+ }
+ for dname, dvalue := range s.DependentRequired {
+ if _, ok := v[dname]; ok {
+ for i, pname := range dvalue {
+ if _, ok := v[pname]; !ok {
+ errors = append(errors, validationError("dependentRequired/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
+ }
+ }
+ }
+ }
+ for dname, sch := range s.DependentSchemas {
+ if _, ok := v[dname]; ok {
+ if err := validateInplace(sch, "dependentSchemas/"+escape(dname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+
+ case []interface{}:
+ if s.MinItems != -1 && len(v) < s.MinItems {
+ errors = append(errors, validationError("minItems", "minimum %d items required, but found %d items", s.MinItems, len(v)))
+ }
+ if s.MaxItems != -1 && len(v) > s.MaxItems {
+ errors = append(errors, validationError("maxItems", "maximum %d items required, but found %d items", s.MaxItems, len(v)))
+ }
+ if s.UniqueItems {
+ for i := 1; i < len(v); i++ {
+ for j := 0; j < i; j++ {
+ if equals(v[i], v[j]) {
+ errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
+ }
+ }
+ }
+ }
+
+ // items + additionalItems
+ switch items := s.Items.(type) {
+ case *Schema:
+ for i, item := range v {
+ if err := validate(items, "items", item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ result.unevalItems = nil
+ case []*Schema:
+ for i, item := range v {
+ if i < len(items) {
+ delete(result.unevalItems, i)
+ if err := validate(items[i], "items/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ } else if sch, ok := s.AdditionalItems.(*Schema); ok {
+ delete(result.unevalItems, i)
+ if err := validate(sch, "additionalItems", item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ } else {
+ break
+ }
+ }
+ if additionalItems, ok := s.AdditionalItems.(bool); ok {
+ if additionalItems {
+ result.unevalItems = nil
+ } else if len(v) > len(items) {
+ errors = append(errors, validationError("additionalItems", "only %d items are allowed, but found %d items", len(items), len(v)))
+ }
+ }
+ }
+
+ // prefixItems + items
+ for i, item := range v {
+ if i < len(s.PrefixItems) {
+ delete(result.unevalItems, i)
+ if err := validate(s.PrefixItems[i], "prefixItems/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ } else if s.Items2020 != nil {
+ delete(result.unevalItems, i)
+ if err := validate(s.Items2020, "items", item, strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ } else {
+ break
+ }
+ }
+
+ // contains + minContains + maxContains
+ if s.Contains != nil && (s.MinContains != -1 || s.MaxContains != -1) {
+ matched := 0
+ var causes []error
+ for i, item := range v {
+ if err := validate(s.Contains, "contains", item, strconv.Itoa(i)); err != nil {
+ causes = append(causes, err)
+ } else {
+ matched++
+ if s.ContainsEval {
+ delete(result.unevalItems, i)
+ }
+ }
+ }
+ if s.MinContains != -1 && matched < s.MinContains {
+ errors = append(errors, validationError("minContains", "valid must be >= %d, but got %d", s.MinContains, matched).add(causes...))
+ }
+ if s.MaxContains != -1 && matched > s.MaxContains {
+ errors = append(errors, validationError("maxContains", "valid must be <= %d, but got %d", s.MaxContains, matched))
+ }
+ }
+
+ case string:
+ // minLength + maxLength
+ if s.MinLength != -1 || s.MaxLength != -1 {
+ length := utf8.RuneCount([]byte(v))
+ if s.MinLength != -1 && length < s.MinLength {
+ errors = append(errors, validationError("minLength", "length must be >= %d, but got %d", s.MinLength, length))
+ }
+ if s.MaxLength != -1 && length > s.MaxLength {
+ errors = append(errors, validationError("maxLength", "length must be <= %d, but got %d", s.MaxLength, length))
+ }
+ }
+
+ if s.Pattern != nil && !s.Pattern.MatchString(v) {
+ errors = append(errors, validationError("pattern", "does not match pattern %s", quote(s.Pattern.String())))
+ }
+
+ // contentEncoding + contentMediaType
+ if s.decoder != nil || s.mediaType != nil {
+ decoded := s.ContentEncoding == ""
+ var content []byte
+ if s.decoder != nil {
+ b, err := s.decoder(v)
+ if err != nil {
+ errors = append(errors, validationError("contentEncoding", "value is not %s encoded", s.ContentEncoding))
+ } else {
+ content, decoded = b, true
+ }
+ }
+ if decoded && s.mediaType != nil {
+ if s.decoder == nil {
+ content = []byte(v)
+ }
+ if err := s.mediaType(content); err != nil {
+ errors = append(errors, validationError("contentMediaType", "value is not of mediatype %s", quote(s.ContentMediaType)))
+ }
+ }
+ if decoded && s.ContentSchema != nil {
+ contentJSON, err := unmarshal(bytes.NewReader(content))
+ if err != nil {
+ errors = append(errors, validationError("contentSchema", "value is not valid json"))
+ } else {
+ err := validate(s.ContentSchema, "contentSchema", contentJSON, "")
+ if err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+ }
+
+ case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
+ // lazy convert to *big.Rat to avoid allocation
+ var numVal *big.Rat
+ num := func() *big.Rat {
+ if numVal == nil {
+ numVal, _ = new(big.Rat).SetString(fmt.Sprint(v))
+ }
+ return numVal
+ }
+ f64 := func(r *big.Rat) float64 {
+ f, _ := r.Float64()
+ return f
+ }
+ if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
+ errors = append(errors, validationError("minimum", "must be >= %v but found %v", f64(s.Minimum), v))
+ }
+ if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
+ errors = append(errors, validationError("exclusiveMinimum", "must be > %v but found %v", f64(s.ExclusiveMinimum), v))
+ }
+ if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
+ errors = append(errors, validationError("maximum", "must be <= %v but found %v", f64(s.Maximum), v))
+ }
+ if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
+ errors = append(errors, validationError("exclusiveMaximum", "must be < %v but found %v", f64(s.ExclusiveMaximum), v))
+ }
+ if s.MultipleOf != nil {
+ if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
+ errors = append(errors, validationError("multipleOf", "%v not multipleOf %v", v, f64(s.MultipleOf)))
+ }
+ }
+ }
+
+ // $ref + $recursiveRef + $dynamicRef
+ validateRef := func(sch *Schema, refPath string) error {
+ if sch != nil {
+ if err := validateInplace(sch, refPath); err != nil {
+ var url = sch.Location
+ if s.url() == sch.url() {
+ url = sch.loc()
+ }
+ return validationError(refPath, "doesn't validate with %s", quote(url)).causes(err)
+ }
+ }
+ return nil
+ }
+ if err := validateRef(s.Ref, "$ref"); err != nil {
+ errors = append(errors, err)
+ }
+ if s.RecursiveRef != nil {
+ sch := s.RecursiveRef
+ if sch.RecursiveAnchor {
+ // recursiveRef based on scope
+ for _, e := range scope {
+ if e.schema.RecursiveAnchor {
+ sch = e.schema
+ break
+ }
+ }
+ }
+ if err := validateRef(sch, "$recursiveRef"); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ if s.DynamicRef != nil {
+ sch := s.DynamicRef
+ if sch.DynamicAnchor != "" {
+ // dynamicRef based on scope
+ for i := len(scope) - 1; i >= 0; i-- {
+ sr := scope[i]
+ if sr.discard {
+ break
+ }
+ for _, da := range sr.schema.dynamicAnchors {
+ if da.DynamicAnchor == s.DynamicRef.DynamicAnchor && da != s.DynamicRef {
+ sch = da
+ break
+ }
+ }
+ }
+ }
+ if err := validateRef(sch, "$dynamicRef"); err != nil {
+ errors = append(errors, err)
+ }
+ }
+
+ if s.Not != nil && validateInplace(s.Not, "not") == nil {
+ errors = append(errors, validationError("not", "not failed"))
+ }
+
+ for i, sch := range s.AllOf {
+ schPath := "allOf/" + strconv.Itoa(i)
+ if err := validateInplace(sch, schPath); err != nil {
+ errors = append(errors, validationError(schPath, "allOf failed").add(err))
+ }
+ }
+
+ if len(s.AnyOf) > 0 {
+ matched := false
+ var causes []error
+ for i, sch := range s.AnyOf {
+ if err := validateInplace(sch, "anyOf/"+strconv.Itoa(i)); err == nil {
+ matched = true
+ } else {
+ causes = append(causes, err)
+ }
+ }
+ if !matched {
+ errors = append(errors, validationError("anyOf", "anyOf failed").add(causes...))
+ }
+ }
+
+ if len(s.OneOf) > 0 {
+ matched := -1
+ var causes []error
+ for i, sch := range s.OneOf {
+ if err := validateInplace(sch, "oneOf/"+strconv.Itoa(i)); err == nil {
+ if matched == -1 {
+ matched = i
+ } else {
+ errors = append(errors, validationError("oneOf", "valid against schemas at indexes %d and %d", matched, i))
+ break
+ }
+ } else {
+ causes = append(causes, err)
+ }
+ }
+ if matched == -1 {
+ errors = append(errors, validationError("oneOf", "oneOf failed").add(causes...))
+ }
+ }
+
+ // if + then + else
+ if s.If != nil {
+ err := validateInplace(s.If, "if")
+ // "if" leaves dynamic scope
+ scope[len(scope)-1].discard = true
+ if err == nil {
+ if s.Then != nil {
+ if err := validateInplace(s.Then, "then"); err != nil {
+ errors = append(errors, validationError("then", "if-then failed").add(err))
+ }
+ }
+ } else {
+ if s.Else != nil {
+ if err := validateInplace(s.Else, "else"); err != nil {
+ errors = append(errors, validationError("else", "if-else failed").add(err))
+ }
+ }
+ }
+ // restore dynamic scope
+ scope[len(scope)-1].discard = false
+ }
+
+ for _, ext := range s.Extensions {
+ if err := ext.Validate(ValidationContext{result, validate, validateInplace, validationError}, v); err != nil {
+ errors = append(errors, err)
+ }
+ }
+
+ // UnevaluatedProperties + UnevaluatedItems
+ switch v := v.(type) {
+ case map[string]interface{}:
+ if s.UnevaluatedProperties != nil {
+ for pname := range result.unevalProps {
+ if pvalue, ok := v[pname]; ok {
+ if err := validate(s.UnevaluatedProperties, "UnevaluatedProperties", pvalue, escape(pname)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ }
+ result.unevalProps = nil
+ }
+ case []interface{}:
+ if s.UnevaluatedItems != nil {
+ for i := range result.unevalItems {
+ if err := validate(s.UnevaluatedItems, "UnevaluatedItems", v[i], strconv.Itoa(i)); err != nil {
+ errors = append(errors, err)
+ }
+ }
+ result.unevalItems = nil
+ }
+ }
+
+ switch len(errors) {
+ case 0:
+ return result, nil
+ case 1:
+ return result, errors[0]
+ default:
+ return result, validationError("", "").add(errors...) // empty message, used just for wrapping
+ }
+}
+
+type validationResult struct {
+ unevalProps map[string]struct{}
+ unevalItems map[int]struct{}
+}
+
+func (vr validationResult) unevalPnames() string {
+ pnames := make([]string, 0, len(vr.unevalProps))
+ for pname := range vr.unevalProps {
+ pnames = append(pnames, quote(pname))
+ }
+ return strings.Join(pnames, ", ")
+}
+
+// jsonType returns the json type of given value v.
+//
+// It panics if the given value is not valid json value
+func jsonType(v interface{}) string {
+ switch v.(type) {
+ case nil:
+ return "null"
+ case bool:
+ return "boolean"
+ case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
+ return "number"
+ case string:
+ return "string"
+ case []interface{}:
+ return "array"
+ case map[string]interface{}:
+ return "object"
+ }
+ panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
+}
+
+// equals tells if given two json values are equal or not.
+func equals(v1, v2 interface{}) bool {
+ v1Type := jsonType(v1)
+ if v1Type != jsonType(v2) {
+ return false
+ }
+ switch v1Type {
+ case "array":
+ arr1, arr2 := v1.([]interface{}), v2.([]interface{})
+ if len(arr1) != len(arr2) {
+ return false
+ }
+ for i := range arr1 {
+ if !equals(arr1[i], arr2[i]) {
+ return false
+ }
+ }
+ return true
+ case "object":
+ obj1, obj2 := v1.(map[string]interface{}), v2.(map[string]interface{})
+ if len(obj1) != len(obj2) {
+ return false
+ }
+ for k, v1 := range obj1 {
+ if v2, ok := obj2[k]; ok {
+ if !equals(v1, v2) {
+ return false
+ }
+ } else {
+ return false
+ }
+ }
+ return true
+ case "number":
+ num1, _ := new(big.Rat).SetString(fmt.Sprint(v1))
+ num2, _ := new(big.Rat).SetString(fmt.Sprint(v2))
+ return num1.Cmp(num2) == 0
+ default:
+ return v1 == v2
+ }
+}
+
+// escape converts given token to valid json-pointer token
+func escape(token string) string {
+ token = strings.ReplaceAll(token, "~", "~0")
+ token = strings.ReplaceAll(token, "/", "~1")
+ return url.PathEscape(token)
+}
diff --git a/vendor/github.com/xeipuuv/gojsonschema/go.mod b/vendor/github.com/xeipuuv/gojsonschema/go.mod
deleted file mode 100644
index b709d7f..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/go.mod
+++ /dev/null
@@ -1,7 +0,0 @@
-module github.com/xeipuuv/gojsonschema
-
-require (
- github.com/stretchr/testify v1.3.0
- github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
- github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415
-)
diff --git a/vendor/github.com/xeipuuv/gojsonschema/go.sum b/vendor/github.com/xeipuuv/gojsonschema/go.sum
deleted file mode 100644
index 0e865ac..0000000
--- a/vendor/github.com/xeipuuv/gojsonschema/go.sum
+++ /dev/null
@@ -1,11 +0,0 @@
-github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
-github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
-github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
-github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
diff --git a/vendor/gopkg.in/yaml.v2/go.mod b/vendor/gopkg.in/yaml.v2/go.mod
deleted file mode 100644
index 2cbb85a..0000000
--- a/vendor/gopkg.in/yaml.v2/go.mod
+++ /dev/null
@@ -1,5 +0,0 @@
-module gopkg.in/yaml.v2
-
-go 1.15
-
-require gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 440e8b6..02e82be 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -1,6 +1,6 @@
-# github.com/beevik/etree v1.1.0
-## explicit
-github.com/beevik/etree
+# github.com/santhosh-tekuri/jsonschema/v5 v5.1.1
+## explicit; go 1.15
+github.com/santhosh-tekuri/jsonschema/v5
# github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb
## explicit
github.com/xeipuuv/gojsonpointer
@@ -11,8 +11,8 @@ github.com/xeipuuv/gojsonreference
## explicit
github.com/xeipuuv/gojsonschema
# gopkg.in/yaml.v2 v2.4.0
-## explicit
+## explicit; go 1.15
gopkg.in/yaml.v2
# sigs.k8s.io/yaml v1.2.0
-## explicit
+## explicit; go 1.12
sigs.k8s.io/yaml
diff --git a/vendor/sigs.k8s.io/yaml/go.mod b/vendor/sigs.k8s.io/yaml/go.mod
deleted file mode 100644
index 7224f34..0000000
--- a/vendor/sigs.k8s.io/yaml/go.mod
+++ /dev/null
@@ -1,8 +0,0 @@
-module sigs.k8s.io/yaml
-
-go 1.12
-
-require (
- github.com/davecgh/go-spew v1.1.1
- gopkg.in/yaml.v2 v2.2.8
-)
diff --git a/vendor/sigs.k8s.io/yaml/go.sum b/vendor/sigs.k8s.io/yaml/go.sum
deleted file mode 100644
index 76e4948..0000000
--- a/vendor/sigs.k8s.io/yaml/go.sum
+++ /dev/null
@@ -1,9 +0,0 @@
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
-gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
-gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
-gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=