mirror of
https://github.com/yannh/kubeconform.git
synced 2026-02-11 22:19:25 +00:00
Try migrating to new JSON validation library
This commit is contained in:
parent
752a33eaeb
commit
ba844cad0a
31 changed files with 4753 additions and 2749 deletions
11
go.mod
11
go.mod
|
|
@ -3,10 +3,13 @@ module github.com/yannh/kubeconform
|
|||
go 1.17
|
||||
|
||||
require (
|
||||
github.com/beevik/etree v1.1.0
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.1.1
|
||||
github.com/xeipuuv/gojsonschema v1.2.0
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
sigs.k8s.io/yaml v1.2.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
)
|
||||
|
|
|
|||
4
go.sum
4
go.sum
|
|
@ -1,10 +1,10 @@
|
|||
github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs=
|
||||
github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.1.1 h1:lEOLY2vyGIqKWUI9nzsOJRV3mb3WC9dXYORsLEUcoeY=
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.1.1/go.mod h1:FKdcjfQW6rpZSnxxUvEA5H/cDPdvJ/SZJQLWWXWGrZ0=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
|
|
|
|||
|
|
@ -6,11 +6,10 @@ import (
|
|||
"fmt"
|
||||
"io"
|
||||
|
||||
jsonschema "github.com/santhosh-tekuri/jsonschema/v5"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"github.com/yannh/kubeconform/pkg/registry"
|
||||
"github.com/yannh/kubeconform/pkg/resource"
|
||||
|
||||
"github.com/xeipuuv/gojsonschema"
|
||||
"sigs.k8s.io/yaml"
|
||||
)
|
||||
|
||||
|
|
@ -91,7 +90,7 @@ func New(schemaLocations []string, opts Opts) (Validator, error) {
|
|||
type v struct {
|
||||
opts Opts
|
||||
schemaCache cache.Cache
|
||||
schemaDownload func(registries []registry.Registry, kind, version, k8sVersion string) (*gojsonschema.Schema, error)
|
||||
schemaDownload func(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error)
|
||||
regs []registry.Registry
|
||||
}
|
||||
|
||||
|
|
@ -151,13 +150,13 @@ func (val *v) ValidateResource(res resource.Resource) Result {
|
|||
}
|
||||
|
||||
cached := false
|
||||
var schema *gojsonschema.Schema
|
||||
var schema *jsonschema.Schema
|
||||
|
||||
if val.schemaCache != nil {
|
||||
s, err := val.schemaCache.Get(sig.Kind, sig.Version, val.opts.KubernetesVersion)
|
||||
if err == nil {
|
||||
cached = true
|
||||
schema = s.(*gojsonschema.Schema)
|
||||
schema = s.(*jsonschema.Schema)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -179,28 +178,13 @@ func (val *v) ValidateResource(res resource.Resource) Result {
|
|||
return Result{Resource: res, Err: fmt.Errorf("could not find schema for %s", sig.Kind), Status: Error}
|
||||
}
|
||||
|
||||
resourceLoader := gojsonschema.NewGoLoader(r)
|
||||
|
||||
results, err := schema.Validate(resourceLoader)
|
||||
err = schema.Validate(r)
|
||||
if err != nil {
|
||||
// This error can only happen if the Object to validate is poorly formed. There's no hope of saving this one
|
||||
return Result{Resource: res, Status: Error, Err: fmt.Errorf("problem validating schema. Check JSON formatting: %s", err)}
|
||||
}
|
||||
|
||||
if results.Valid() {
|
||||
return Result{Resource: res, Status: Valid}
|
||||
}
|
||||
|
||||
msg := ""
|
||||
for _, errMsg := range results.Errors() {
|
||||
if msg != "" {
|
||||
msg += " - "
|
||||
}
|
||||
details := errMsg.Details()
|
||||
msg += fmt.Sprintf("For field %s: %s", details["field"].(string), errMsg.Description())
|
||||
}
|
||||
|
||||
return Result{Resource: res, Status: Invalid, Err: fmt.Errorf("%s", msg)}
|
||||
return Result{Resource: res, Status: Valid}
|
||||
}
|
||||
|
||||
// ValidateWithContext validates resources found in r
|
||||
|
|
@ -235,17 +219,17 @@ func (val *v) Validate(filename string, r io.ReadCloser) []Result {
|
|||
return val.ValidateWithContext(context.Background(), filename, r)
|
||||
}
|
||||
|
||||
func downloadSchema(registries []registry.Registry, kind, version, k8sVersion string) (*gojsonschema.Schema, error) {
|
||||
func downloadSchema(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error) {
|
||||
var err error
|
||||
var schemaBytes []byte
|
||||
|
||||
for _, reg := range registries {
|
||||
schemaBytes, err = reg.DownloadSchema(kind, version, k8sVersion)
|
||||
if err == nil {
|
||||
schema, err := gojsonschema.NewSchema(gojsonschema.NewBytesLoader(schemaBytes))
|
||||
|
||||
schema, err := jsonschema.CompileString(fmt.Sprintf("%s%s%s", kind, version, k8sVersion), string(schemaBytes))
|
||||
// If we got a non-parseable response, we try the next registry
|
||||
if err != nil {
|
||||
fmt.Printf("TOTO %s\n", err)
|
||||
continue
|
||||
}
|
||||
return schema, err
|
||||
|
|
|
|||
14
vendor/github.com/beevik/etree/.travis.yml
generated
vendored
14
vendor/github.com/beevik/etree/.travis.yml
generated
vendored
|
|
@ -1,14 +0,0 @@
|
|||
language: go
|
||||
sudo: false
|
||||
|
||||
go:
|
||||
- 1.11.x
|
||||
- tip
|
||||
|
||||
matrix:
|
||||
allow_failures:
|
||||
- go: tip
|
||||
|
||||
script:
|
||||
- go vet ./...
|
||||
- go test -v ./...
|
||||
10
vendor/github.com/beevik/etree/CONTRIBUTORS
generated
vendored
10
vendor/github.com/beevik/etree/CONTRIBUTORS
generated
vendored
|
|
@ -1,10 +0,0 @@
|
|||
Brett Vickers (beevik)
|
||||
Felix Geisendörfer (felixge)
|
||||
Kamil Kisiel (kisielk)
|
||||
Graham King (grahamking)
|
||||
Matt Smith (ma314smith)
|
||||
Michal Jemala (michaljemala)
|
||||
Nicolas Piganeau (npiganeau)
|
||||
Chris Brown (ccbrown)
|
||||
Earncef Sequeira (earncef)
|
||||
Gabriel de Labachelerie (wuzuf)
|
||||
24
vendor/github.com/beevik/etree/LICENSE
generated
vendored
24
vendor/github.com/beevik/etree/LICENSE
generated
vendored
|
|
@ -1,24 +0,0 @@
|
|||
Copyright 2015-2019 Brett Vickers. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDER ``AS IS'' AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER OR
|
||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
||||
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
205
vendor/github.com/beevik/etree/README.md
generated
vendored
205
vendor/github.com/beevik/etree/README.md
generated
vendored
|
|
@ -1,205 +0,0 @@
|
|||
[](https://travis-ci.org/beevik/etree)
|
||||
[](https://godoc.org/github.com/beevik/etree)
|
||||
|
||||
etree
|
||||
=====
|
||||
|
||||
The etree package is a lightweight, pure go package that expresses XML in
|
||||
the form of an element tree. Its design was inspired by the Python
|
||||
[ElementTree](http://docs.python.org/2/library/xml.etree.elementtree.html)
|
||||
module.
|
||||
|
||||
Some of the package's capabilities and features:
|
||||
|
||||
* Represents XML documents as trees of elements for easy traversal.
|
||||
* Imports, serializes, modifies or creates XML documents from scratch.
|
||||
* Writes and reads XML to/from files, byte slices, strings and io interfaces.
|
||||
* Performs simple or complex searches with lightweight XPath-like query APIs.
|
||||
* Auto-indents XML using spaces or tabs for better readability.
|
||||
* Implemented in pure go; depends only on standard go libraries.
|
||||
* Built on top of the go [encoding/xml](http://golang.org/pkg/encoding/xml)
|
||||
package.
|
||||
|
||||
### Creating an XML document
|
||||
|
||||
The following example creates an XML document from scratch using the etree
|
||||
package and outputs its indented contents to stdout.
|
||||
```go
|
||||
doc := etree.NewDocument()
|
||||
doc.CreateProcInst("xml", `version="1.0" encoding="UTF-8"`)
|
||||
doc.CreateProcInst("xml-stylesheet", `type="text/xsl" href="style.xsl"`)
|
||||
|
||||
people := doc.CreateElement("People")
|
||||
people.CreateComment("These are all known people")
|
||||
|
||||
jon := people.CreateElement("Person")
|
||||
jon.CreateAttr("name", "Jon")
|
||||
|
||||
sally := people.CreateElement("Person")
|
||||
sally.CreateAttr("name", "Sally")
|
||||
|
||||
doc.Indent(2)
|
||||
doc.WriteTo(os.Stdout)
|
||||
```
|
||||
|
||||
Output:
|
||||
```xml
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<?xml-stylesheet type="text/xsl" href="style.xsl"?>
|
||||
<People>
|
||||
<!--These are all known people-->
|
||||
<Person name="Jon"/>
|
||||
<Person name="Sally"/>
|
||||
</People>
|
||||
```
|
||||
|
||||
### Reading an XML file
|
||||
|
||||
Suppose you have a file on disk called `bookstore.xml` containing the
|
||||
following data:
|
||||
|
||||
```xml
|
||||
<bookstore xmlns:p="urn:schemas-books-com:prices">
|
||||
|
||||
<book category="COOKING">
|
||||
<title lang="en">Everyday Italian</title>
|
||||
<author>Giada De Laurentiis</author>
|
||||
<year>2005</year>
|
||||
<p:price>30.00</p:price>
|
||||
</book>
|
||||
|
||||
<book category="CHILDREN">
|
||||
<title lang="en">Harry Potter</title>
|
||||
<author>J K. Rowling</author>
|
||||
<year>2005</year>
|
||||
<p:price>29.99</p:price>
|
||||
</book>
|
||||
|
||||
<book category="WEB">
|
||||
<title lang="en">XQuery Kick Start</title>
|
||||
<author>James McGovern</author>
|
||||
<author>Per Bothner</author>
|
||||
<author>Kurt Cagle</author>
|
||||
<author>James Linn</author>
|
||||
<author>Vaidyanathan Nagarajan</author>
|
||||
<year>2003</year>
|
||||
<p:price>49.99</p:price>
|
||||
</book>
|
||||
|
||||
<book category="WEB">
|
||||
<title lang="en">Learning XML</title>
|
||||
<author>Erik T. Ray</author>
|
||||
<year>2003</year>
|
||||
<p:price>39.95</p:price>
|
||||
</book>
|
||||
|
||||
</bookstore>
|
||||
```
|
||||
|
||||
This code reads the file's contents into an etree document.
|
||||
```go
|
||||
doc := etree.NewDocument()
|
||||
if err := doc.ReadFromFile("bookstore.xml"); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
```
|
||||
|
||||
You can also read XML from a string, a byte slice, or an `io.Reader`.
|
||||
|
||||
### Processing elements and attributes
|
||||
|
||||
This example illustrates several ways to access elements and attributes using
|
||||
etree selection queries.
|
||||
```go
|
||||
root := doc.SelectElement("bookstore")
|
||||
fmt.Println("ROOT element:", root.Tag)
|
||||
|
||||
for _, book := range root.SelectElements("book") {
|
||||
fmt.Println("CHILD element:", book.Tag)
|
||||
if title := book.SelectElement("title"); title != nil {
|
||||
lang := title.SelectAttrValue("lang", "unknown")
|
||||
fmt.Printf(" TITLE: %s (%s)\n", title.Text(), lang)
|
||||
}
|
||||
for _, attr := range book.Attr {
|
||||
fmt.Printf(" ATTR: %s=%s\n", attr.Key, attr.Value)
|
||||
}
|
||||
}
|
||||
```
|
||||
Output:
|
||||
```
|
||||
ROOT element: bookstore
|
||||
CHILD element: book
|
||||
TITLE: Everyday Italian (en)
|
||||
ATTR: category=COOKING
|
||||
CHILD element: book
|
||||
TITLE: Harry Potter (en)
|
||||
ATTR: category=CHILDREN
|
||||
CHILD element: book
|
||||
TITLE: XQuery Kick Start (en)
|
||||
ATTR: category=WEB
|
||||
CHILD element: book
|
||||
TITLE: Learning XML (en)
|
||||
ATTR: category=WEB
|
||||
```
|
||||
|
||||
### Path queries
|
||||
|
||||
This example uses etree's path functions to select all book titles that fall
|
||||
into the category of 'WEB'. The double-slash prefix in the path causes the
|
||||
search for book elements to occur recursively; book elements may appear at any
|
||||
level of the XML hierarchy.
|
||||
```go
|
||||
for _, t := range doc.FindElements("//book[@category='WEB']/title") {
|
||||
fmt.Println("Title:", t.Text())
|
||||
}
|
||||
```
|
||||
|
||||
Output:
|
||||
```
|
||||
Title: XQuery Kick Start
|
||||
Title: Learning XML
|
||||
```
|
||||
|
||||
This example finds the first book element under the root bookstore element and
|
||||
outputs the tag and text of each of its child elements.
|
||||
```go
|
||||
for _, e := range doc.FindElements("./bookstore/book[1]/*") {
|
||||
fmt.Printf("%s: %s\n", e.Tag, e.Text())
|
||||
}
|
||||
```
|
||||
|
||||
Output:
|
||||
```
|
||||
title: Everyday Italian
|
||||
author: Giada De Laurentiis
|
||||
year: 2005
|
||||
price: 30.00
|
||||
```
|
||||
|
||||
This example finds all books with a price of 49.99 and outputs their titles.
|
||||
```go
|
||||
path := etree.MustCompilePath("./bookstore/book[p:price='49.99']/title")
|
||||
for _, e := range doc.FindElementsPath(path) {
|
||||
fmt.Println(e.Text())
|
||||
}
|
||||
```
|
||||
|
||||
Output:
|
||||
```
|
||||
XQuery Kick Start
|
||||
```
|
||||
|
||||
Note that this example uses the FindElementsPath function, which takes as an
|
||||
argument a pre-compiled path object. Use precompiled paths when you plan to
|
||||
search with the same path more than once.
|
||||
|
||||
### Other features
|
||||
|
||||
These are just a few examples of the things the etree package can do. See the
|
||||
[documentation](http://godoc.org/github.com/beevik/etree) for a complete
|
||||
description of its capabilities.
|
||||
|
||||
### Contributing
|
||||
|
||||
This project accepts contributions. Just fork the repo and submit a pull
|
||||
request!
|
||||
109
vendor/github.com/beevik/etree/RELEASE_NOTES.md
generated
vendored
109
vendor/github.com/beevik/etree/RELEASE_NOTES.md
generated
vendored
|
|
@ -1,109 +0,0 @@
|
|||
Release v1.1.0
|
||||
==============
|
||||
|
||||
**New Features**
|
||||
|
||||
* New attribute helpers.
|
||||
* Added the `Element.SortAttrs` method, which lexicographically sorts an
|
||||
element's attributes by key.
|
||||
* New `ReadSettings` properties.
|
||||
* Added `Entity` for the support of custom entity maps.
|
||||
* New `WriteSettings` properties.
|
||||
* Added `UseCRLF` to allow the output of CR-LF newlines instead of the
|
||||
default LF newlines. This is useful on Windows systems.
|
||||
* Additional support for text and CDATA sections.
|
||||
* The `Element.Text` method now returns the concatenation of all consecutive
|
||||
character data tokens immediately following an element's opening tag.
|
||||
* Added `Element.SetCData` to replace the character data immediately
|
||||
following an element's opening tag with a CDATA section.
|
||||
* Added `Element.CreateCData` to create and add a CDATA section child
|
||||
`CharData` token to an element.
|
||||
* Added `Element.CreateText` to create and add a child text `CharData` token
|
||||
to an element.
|
||||
* Added `NewCData` to create a parentless CDATA section `CharData` token.
|
||||
* Added `NewText` to create a parentless text `CharData`
|
||||
token.
|
||||
* Added `CharData.IsCData` to detect if the token contains a CDATA section.
|
||||
* Added `CharData.IsWhitespace` to detect if the token contains whitespace
|
||||
inserted by one of the document Indent functions.
|
||||
* Modified `Element.SetText` so that it replaces a run of consecutive
|
||||
character data tokens following the element's opening tag (instead of just
|
||||
the first one).
|
||||
* New "tail text" support.
|
||||
* Added the `Element.Tail` method, which returns the text immediately
|
||||
following an element's closing tag.
|
||||
* Added the `Element.SetTail` method, which modifies the text immediately
|
||||
following an element's closing tag.
|
||||
* New element child insertion and removal methods.
|
||||
* Added the `Element.InsertChildAt` method, which inserts a new child token
|
||||
before the specified child token index.
|
||||
* Added the `Element.RemoveChildAt` method, which removes the child token at
|
||||
the specified child token index.
|
||||
* New element and attribute queries.
|
||||
* Added the `Element.Index` method, which returns the element's index within
|
||||
its parent element's child token list.
|
||||
* Added the `Element.NamespaceURI` method to return the namespace URI
|
||||
associated with an element.
|
||||
* Added the `Attr.NamespaceURI` method to return the namespace URI
|
||||
associated with an element.
|
||||
* Added the `Attr.Element` method to return the element that an attribute
|
||||
belongs to.
|
||||
* New Path filter functions.
|
||||
* Added `[local-name()='val']` to keep elements whose unprefixed tag matches
|
||||
the desired value.
|
||||
* Added `[name()='val']` to keep elements whose full tag matches the desired
|
||||
value.
|
||||
* Added `[namespace-prefix()='val']` to keep elements whose namespace prefix
|
||||
matches the desired value.
|
||||
* Added `[namespace-uri()='val']` to keep elements whose namespace URI
|
||||
matches the desired value.
|
||||
|
||||
**Bug Fixes**
|
||||
|
||||
* A default XML `CharSetReader` is now used to prevent failed parsing of XML
|
||||
documents using certain encodings.
|
||||
([Issue](https://github.com/beevik/etree/issues/53)).
|
||||
* All characters are now properly escaped according to XML parsing rules.
|
||||
([Issue](https://github.com/beevik/etree/issues/55)).
|
||||
* The `Document.Indent` and `Document.IndentTabs` functions no longer insert
|
||||
empty string `CharData` tokens.
|
||||
|
||||
**Deprecated**
|
||||
|
||||
* `Element`
|
||||
* The `InsertChild` method is deprecated. Use `InsertChildAt` instead.
|
||||
* The `CreateCharData` method is deprecated. Use `CreateText` instead.
|
||||
* `CharData`
|
||||
* The `NewCharData` method is deprecated. Use `NewText` instead.
|
||||
|
||||
|
||||
Release v1.0.1
|
||||
==============
|
||||
|
||||
**Changes**
|
||||
|
||||
* Added support for absolute etree Path queries. An absolute path begins with
|
||||
`/` or `//` and begins its search from the element's document root.
|
||||
* Added [`GetPath`](https://godoc.org/github.com/beevik/etree#Element.GetPath)
|
||||
and [`GetRelativePath`](https://godoc.org/github.com/beevik/etree#Element.GetRelativePath)
|
||||
functions to the [`Element`](https://godoc.org/github.com/beevik/etree#Element)
|
||||
type.
|
||||
|
||||
**Breaking changes**
|
||||
|
||||
* A path starting with `//` is now interpreted as an absolute path.
|
||||
Previously, it was interpreted as a relative path starting from the element
|
||||
whose
|
||||
[`FindElement`](https://godoc.org/github.com/beevik/etree#Element.FindElement)
|
||||
method was called. To remain compatible with this release, all paths
|
||||
prefixed with `//` should be prefixed with `.//` when called from any
|
||||
element other than the document's root.
|
||||
* [**edit 2/1/2019**]: Minor releases should not contain breaking changes.
|
||||
Even though this breaking change was very minor, it was a mistake to include
|
||||
it in this minor release. In the future, all breaking changes will be
|
||||
limited to major releases (e.g., version 2.0.0).
|
||||
|
||||
Release v1.0.0
|
||||
==============
|
||||
|
||||
Initial release.
|
||||
1453
vendor/github.com/beevik/etree/etree.go
generated
vendored
1453
vendor/github.com/beevik/etree/etree.go
generated
vendored
File diff suppressed because it is too large
Load diff
276
vendor/github.com/beevik/etree/helpers.go
generated
vendored
276
vendor/github.com/beevik/etree/helpers.go
generated
vendored
|
|
@ -1,276 +0,0 @@
|
|||
// Copyright 2015-2019 Brett Vickers.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package etree
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"io"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// A simple stack
|
||||
type stack struct {
|
||||
data []interface{}
|
||||
}
|
||||
|
||||
func (s *stack) empty() bool {
|
||||
return len(s.data) == 0
|
||||
}
|
||||
|
||||
func (s *stack) push(value interface{}) {
|
||||
s.data = append(s.data, value)
|
||||
}
|
||||
|
||||
func (s *stack) pop() interface{} {
|
||||
value := s.data[len(s.data)-1]
|
||||
s.data[len(s.data)-1] = nil
|
||||
s.data = s.data[:len(s.data)-1]
|
||||
return value
|
||||
}
|
||||
|
||||
func (s *stack) peek() interface{} {
|
||||
return s.data[len(s.data)-1]
|
||||
}
|
||||
|
||||
// A fifo is a simple first-in-first-out queue.
|
||||
type fifo struct {
|
||||
data []interface{}
|
||||
head, tail int
|
||||
}
|
||||
|
||||
func (f *fifo) add(value interface{}) {
|
||||
if f.len()+1 >= len(f.data) {
|
||||
f.grow()
|
||||
}
|
||||
f.data[f.tail] = value
|
||||
if f.tail++; f.tail == len(f.data) {
|
||||
f.tail = 0
|
||||
}
|
||||
}
|
||||
|
||||
func (f *fifo) remove() interface{} {
|
||||
value := f.data[f.head]
|
||||
f.data[f.head] = nil
|
||||
if f.head++; f.head == len(f.data) {
|
||||
f.head = 0
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
func (f *fifo) len() int {
|
||||
if f.tail >= f.head {
|
||||
return f.tail - f.head
|
||||
}
|
||||
return len(f.data) - f.head + f.tail
|
||||
}
|
||||
|
||||
func (f *fifo) grow() {
|
||||
c := len(f.data) * 2
|
||||
if c == 0 {
|
||||
c = 4
|
||||
}
|
||||
buf, count := make([]interface{}, c), f.len()
|
||||
if f.tail >= f.head {
|
||||
copy(buf[0:count], f.data[f.head:f.tail])
|
||||
} else {
|
||||
hindex := len(f.data) - f.head
|
||||
copy(buf[0:hindex], f.data[f.head:])
|
||||
copy(buf[hindex:count], f.data[:f.tail])
|
||||
}
|
||||
f.data, f.head, f.tail = buf, 0, count
|
||||
}
|
||||
|
||||
// countReader implements a proxy reader that counts the number of
|
||||
// bytes read from its encapsulated reader.
|
||||
type countReader struct {
|
||||
r io.Reader
|
||||
bytes int64
|
||||
}
|
||||
|
||||
func newCountReader(r io.Reader) *countReader {
|
||||
return &countReader{r: r}
|
||||
}
|
||||
|
||||
func (cr *countReader) Read(p []byte) (n int, err error) {
|
||||
b, err := cr.r.Read(p)
|
||||
cr.bytes += int64(b)
|
||||
return b, err
|
||||
}
|
||||
|
||||
// countWriter implements a proxy writer that counts the number of
|
||||
// bytes written by its encapsulated writer.
|
||||
type countWriter struct {
|
||||
w io.Writer
|
||||
bytes int64
|
||||
}
|
||||
|
||||
func newCountWriter(w io.Writer) *countWriter {
|
||||
return &countWriter{w: w}
|
||||
}
|
||||
|
||||
func (cw *countWriter) Write(p []byte) (n int, err error) {
|
||||
b, err := cw.w.Write(p)
|
||||
cw.bytes += int64(b)
|
||||
return b, err
|
||||
}
|
||||
|
||||
// isWhitespace returns true if the byte slice contains only
|
||||
// whitespace characters.
|
||||
func isWhitespace(s string) bool {
|
||||
for i := 0; i < len(s); i++ {
|
||||
if c := s[i]; c != ' ' && c != '\t' && c != '\n' && c != '\r' {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// spaceMatch returns true if namespace a is the empty string
|
||||
// or if namespace a equals namespace b.
|
||||
func spaceMatch(a, b string) bool {
|
||||
switch {
|
||||
case a == "":
|
||||
return true
|
||||
default:
|
||||
return a == b
|
||||
}
|
||||
}
|
||||
|
||||
// spaceDecompose breaks a namespace:tag identifier at the ':'
|
||||
// and returns the two parts.
|
||||
func spaceDecompose(str string) (space, key string) {
|
||||
colon := strings.IndexByte(str, ':')
|
||||
if colon == -1 {
|
||||
return "", str
|
||||
}
|
||||
return str[:colon], str[colon+1:]
|
||||
}
|
||||
|
||||
// Strings used by indentCRLF and indentLF
|
||||
const (
|
||||
indentSpaces = "\r\n "
|
||||
indentTabs = "\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t"
|
||||
)
|
||||
|
||||
// indentCRLF returns a CRLF newline followed by n copies of the first
|
||||
// non-CRLF character in the source string.
|
||||
func indentCRLF(n int, source string) string {
|
||||
switch {
|
||||
case n < 0:
|
||||
return source[:2]
|
||||
case n < len(source)-1:
|
||||
return source[:n+2]
|
||||
default:
|
||||
return source + strings.Repeat(source[2:3], n-len(source)+2)
|
||||
}
|
||||
}
|
||||
|
||||
// indentLF returns a LF newline followed by n copies of the first non-LF
|
||||
// character in the source string.
|
||||
func indentLF(n int, source string) string {
|
||||
switch {
|
||||
case n < 0:
|
||||
return source[1:2]
|
||||
case n < len(source)-1:
|
||||
return source[1 : n+2]
|
||||
default:
|
||||
return source[1:] + strings.Repeat(source[2:3], n-len(source)+2)
|
||||
}
|
||||
}
|
||||
|
||||
// nextIndex returns the index of the next occurrence of sep in s,
|
||||
// starting from offset. It returns -1 if the sep string is not found.
|
||||
func nextIndex(s, sep string, offset int) int {
|
||||
switch i := strings.Index(s[offset:], sep); i {
|
||||
case -1:
|
||||
return -1
|
||||
default:
|
||||
return offset + i
|
||||
}
|
||||
}
|
||||
|
||||
// isInteger returns true if the string s contains an integer.
|
||||
func isInteger(s string) bool {
|
||||
for i := 0; i < len(s); i++ {
|
||||
if (s[i] < '0' || s[i] > '9') && !(i == 0 && s[i] == '-') {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
type escapeMode byte
|
||||
|
||||
const (
|
||||
escapeNormal escapeMode = iota
|
||||
escapeCanonicalText
|
||||
escapeCanonicalAttr
|
||||
)
|
||||
|
||||
// escapeString writes an escaped version of a string to the writer.
|
||||
func escapeString(w *bufio.Writer, s string, m escapeMode) {
|
||||
var esc []byte
|
||||
last := 0
|
||||
for i := 0; i < len(s); {
|
||||
r, width := utf8.DecodeRuneInString(s[i:])
|
||||
i += width
|
||||
switch r {
|
||||
case '&':
|
||||
esc = []byte("&")
|
||||
case '<':
|
||||
esc = []byte("<")
|
||||
case '>':
|
||||
if m == escapeCanonicalAttr {
|
||||
continue
|
||||
}
|
||||
esc = []byte(">")
|
||||
case '\'':
|
||||
if m != escapeNormal {
|
||||
continue
|
||||
}
|
||||
esc = []byte("'")
|
||||
case '"':
|
||||
if m == escapeCanonicalText {
|
||||
continue
|
||||
}
|
||||
esc = []byte(""")
|
||||
case '\t':
|
||||
if m != escapeCanonicalAttr {
|
||||
continue
|
||||
}
|
||||
esc = []byte("	")
|
||||
case '\n':
|
||||
if m != escapeCanonicalAttr {
|
||||
continue
|
||||
}
|
||||
esc = []byte("
")
|
||||
case '\r':
|
||||
if m == escapeNormal {
|
||||
continue
|
||||
}
|
||||
esc = []byte("
")
|
||||
default:
|
||||
if !isInCharacterRange(r) || (r == 0xFFFD && width == 1) {
|
||||
esc = []byte("\uFFFD")
|
||||
break
|
||||
}
|
||||
continue
|
||||
}
|
||||
w.WriteString(s[last : i-width])
|
||||
w.Write(esc)
|
||||
last = i
|
||||
}
|
||||
w.WriteString(s[last:])
|
||||
}
|
||||
|
||||
func isInCharacterRange(r rune) bool {
|
||||
return r == 0x09 ||
|
||||
r == 0x0A ||
|
||||
r == 0x0D ||
|
||||
r >= 0x20 && r <= 0xD7FF ||
|
||||
r >= 0xE000 && r <= 0xFFFD ||
|
||||
r >= 0x10000 && r <= 0x10FFFF
|
||||
}
|
||||
582
vendor/github.com/beevik/etree/path.go
generated
vendored
582
vendor/github.com/beevik/etree/path.go
generated
vendored
|
|
@ -1,582 +0,0 @@
|
|||
// Copyright 2015-2019 Brett Vickers.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package etree
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
/*
|
||||
A Path is a string that represents a search path through an etree starting
|
||||
from the document root or an arbitrary element. Paths are used with the
|
||||
Element object's Find* methods to locate and return desired elements.
|
||||
|
||||
A Path consists of a series of slash-separated "selectors", each of which may
|
||||
be modified by one or more bracket-enclosed "filters". Selectors are used to
|
||||
traverse the etree from element to element, while filters are used to narrow
|
||||
the list of candidate elements at each node.
|
||||
|
||||
Although etree Path strings are similar to XPath strings
|
||||
(https://www.w3.org/TR/1999/REC-xpath-19991116/), they have a more limited set
|
||||
of selectors and filtering options.
|
||||
|
||||
The following selectors are supported by etree Path strings:
|
||||
|
||||
. Select the current element.
|
||||
.. Select the parent of the current element.
|
||||
* Select all child elements of the current element.
|
||||
/ Select the root element when used at the start of a path.
|
||||
// Select all descendants of the current element.
|
||||
tag Select all child elements with a name matching the tag.
|
||||
|
||||
The following basic filters are supported by etree Path strings:
|
||||
|
||||
[@attrib] Keep elements with an attribute named attrib.
|
||||
[@attrib='val'] Keep elements with an attribute named attrib and value matching val.
|
||||
[tag] Keep elements with a child element named tag.
|
||||
[tag='val'] Keep elements with a child element named tag and text matching val.
|
||||
[n] Keep the n-th element, where n is a numeric index starting from 1.
|
||||
|
||||
The following function filters are also supported:
|
||||
|
||||
[text()] Keep elements with non-empty text.
|
||||
[text()='val'] Keep elements whose text matches val.
|
||||
[local-name()='val'] Keep elements whose un-prefixed tag matches val.
|
||||
[name()='val'] Keep elements whose full tag exactly matches val.
|
||||
[namespace-prefix()='val'] Keep elements whose namespace prefix matches val.
|
||||
[namespace-uri()='val'] Keep elements whose namespace URI matches val.
|
||||
|
||||
Here are some examples of Path strings:
|
||||
|
||||
- Select the bookstore child element of the root element:
|
||||
/bookstore
|
||||
|
||||
- Beginning from the root element, select the title elements of all
|
||||
descendant book elements having a 'category' attribute of 'WEB':
|
||||
//book[@category='WEB']/title
|
||||
|
||||
- Beginning from the current element, select the first descendant
|
||||
book element with a title child element containing the text 'Great
|
||||
Expectations':
|
||||
.//book[title='Great Expectations'][1]
|
||||
|
||||
- Beginning from the current element, select all child elements of
|
||||
book elements with an attribute 'language' set to 'english':
|
||||
./book/*[@language='english']
|
||||
|
||||
- Beginning from the current element, select all child elements of
|
||||
book elements containing the text 'special':
|
||||
./book/*[text()='special']
|
||||
|
||||
- Beginning from the current element, select all descendant book
|
||||
elements whose title child element has a 'language' attribute of 'french':
|
||||
.//book/title[@language='french']/..
|
||||
|
||||
- Beginning from the current element, select all book elements
|
||||
belonging to the http://www.w3.org/TR/html4/ namespace:
|
||||
.//book[namespace-uri()='http://www.w3.org/TR/html4/']
|
||||
|
||||
*/
|
||||
// Path is a compiled path expression that can be applied repeatedly and
// concurrently to an element tree. Create one with CompilePath or
// MustCompilePath rather than constructing it directly.
type Path struct {
	segments []segment // ordered path segments, applied left to right
}
|
||||
|
||||
// ErrPath is returned by path functions when an invalid etree path is provided.
type ErrPath string

// Error returns the string describing a path error, prefixed with the
// package name.
func (err ErrPath) Error() string {
	msg := string(err)
	return "etree: " + msg
}
|
||||
|
||||
// CompilePath creates an optimized version of an XPath-like string that
|
||||
// can be used to query elements in an element tree.
|
||||
func CompilePath(path string) (Path, error) {
|
||||
var comp compiler
|
||||
segments := comp.parsePath(path)
|
||||
if comp.err != ErrPath("") {
|
||||
return Path{nil}, comp.err
|
||||
}
|
||||
return Path{segments}, nil
|
||||
}
|
||||
|
||||
// MustCompilePath creates an optimized version of an XPath-like string that
|
||||
// can be used to query elements in an element tree. Panics if an error
|
||||
// occurs. Use this function to create Paths when you know the path is
|
||||
// valid (i.e., if it's hard-coded).
|
||||
func MustCompilePath(path string) Path {
|
||||
p, err := CompilePath(path)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// A segment is a portion of a path between "/" characters.
|
||||
// It contains one selector and zero or more [filters].
|
||||
type segment struct {
|
||||
sel selector
|
||||
filters []filter
|
||||
}
|
||||
|
||||
func (seg *segment) apply(e *Element, p *pather) {
|
||||
seg.sel.apply(e, p)
|
||||
for _, f := range seg.filters {
|
||||
f.apply(p)
|
||||
}
|
||||
}
|
||||
|
||||
// A selector selects XML elements for consideration by the
// path traversal.
type selector interface {
	// apply appends the elements selected from e to p.candidates.
	apply(e *Element, p *pather)
}

// A filter pares down a list of candidate XML elements based
// on a path filter in [brackets].
type filter interface {
	// apply winnows p.candidates in place, using p.scratch as working space.
	apply(p *pather)
}
|
||||
|
||||
// A pather is a helper object that traverses an element tree using
// a Path object. It collects and deduplicates all elements matching
// the path query.
type pather struct {
	queue      fifo              // pending (element, remaining segments) work items
	results    []*Element        // matched elements, in discovery order
	inResults  map[*Element]bool // membership set used to deduplicate results
	candidates []*Element        // elements selected by the segment currently applied
	scratch    []*Element        // used by filters
}

// A node represents an element and the remaining path segments that
// should be applied against it by the pather.
type node struct {
	e        *Element
	segments []segment
}
|
||||
|
||||
func newPather() *pather {
|
||||
return &pather{
|
||||
results: make([]*Element, 0),
|
||||
inResults: make(map[*Element]bool),
|
||||
candidates: make([]*Element, 0),
|
||||
scratch: make([]*Element, 0),
|
||||
}
|
||||
}
|
||||
|
||||
// traverse follows the path from the element e, collecting
|
||||
// and then returning all elements that match the path's selectors
|
||||
// and filters.
|
||||
func (p *pather) traverse(e *Element, path Path) []*Element {
|
||||
for p.queue.add(node{e, path.segments}); p.queue.len() > 0; {
|
||||
p.eval(p.queue.remove().(node))
|
||||
}
|
||||
return p.results
|
||||
}
|
||||
|
||||
// eval evalutes the current path node by applying the remaining
|
||||
// path's selector rules against the node's element.
|
||||
func (p *pather) eval(n node) {
|
||||
p.candidates = p.candidates[0:0]
|
||||
seg, remain := n.segments[0], n.segments[1:]
|
||||
seg.apply(n.e, p)
|
||||
|
||||
if len(remain) == 0 {
|
||||
for _, c := range p.candidates {
|
||||
if in := p.inResults[c]; !in {
|
||||
p.inResults[c] = true
|
||||
p.results = append(p.results, c)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for _, c := range p.candidates {
|
||||
p.queue.add(node{c, remain})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// A compiler generates a compiled path from a path string.
type compiler struct {
	err ErrPath // first parse error encountered; empty ErrPath ("") on success
}
|
||||
|
||||
// parsePath parses an XPath-like string describing a path
|
||||
// through an element tree and returns a slice of segment
|
||||
// descriptors.
|
||||
func (c *compiler) parsePath(path string) []segment {
|
||||
// If path ends with //, fix it
|
||||
if strings.HasSuffix(path, "//") {
|
||||
path = path + "*"
|
||||
}
|
||||
|
||||
var segments []segment
|
||||
|
||||
// Check for an absolute path
|
||||
if strings.HasPrefix(path, "/") {
|
||||
segments = append(segments, segment{new(selectRoot), []filter{}})
|
||||
path = path[1:]
|
||||
}
|
||||
|
||||
// Split path into segments
|
||||
for _, s := range splitPath(path) {
|
||||
segments = append(segments, c.parseSegment(s))
|
||||
if c.err != ErrPath("") {
|
||||
break
|
||||
}
|
||||
}
|
||||
return segments
|
||||
}
|
||||
|
||||
// splitPath splits a path string on "/" separators, ignoring any
// separator that appears inside a single-quoted section (as used by
// filter expressions such as [tag='a/b']).
func splitPath(path string) []string {
	pieces := make([]string, 0)
	start, quoted := 0, false
	for i := 0; i < len(path); i++ {
		switch {
		case path[i] == '\'':
			quoted = !quoted
		case path[i] == '/' && !quoted:
			pieces = append(pieces, path[start:i])
			start = i + 1
		}
	}
	// The final piece runs from the last separator to the end of the path.
	return append(pieces, path[start:])
}
|
||||
|
||||
// parseSegment parses a path segment between / characters.
|
||||
func (c *compiler) parseSegment(path string) segment {
|
||||
pieces := strings.Split(path, "[")
|
||||
seg := segment{
|
||||
sel: c.parseSelector(pieces[0]),
|
||||
filters: []filter{},
|
||||
}
|
||||
for i := 1; i < len(pieces); i++ {
|
||||
fpath := pieces[i]
|
||||
if fpath[len(fpath)-1] != ']' {
|
||||
c.err = ErrPath("path has invalid filter [brackets].")
|
||||
break
|
||||
}
|
||||
seg.filters = append(seg.filters, c.parseFilter(fpath[:len(fpath)-1]))
|
||||
}
|
||||
return seg
|
||||
}
|
||||
|
||||
// parseSelector parses a selector at the start of a path segment.
|
||||
func (c *compiler) parseSelector(path string) selector {
|
||||
switch path {
|
||||
case ".":
|
||||
return new(selectSelf)
|
||||
case "..":
|
||||
return new(selectParent)
|
||||
case "*":
|
||||
return new(selectChildren)
|
||||
case "":
|
||||
return new(selectDescendants)
|
||||
default:
|
||||
return newSelectChildrenByTag(path)
|
||||
}
|
||||
}
|
||||
|
||||
// fnTable maps each supported path function name to its implementation.
// hasFn (may be nil) implements the bare predicate form [fn()]; getValFn
// (may be nil) implements the value-comparison form [fn()='val'].
var fnTable = map[string]struct {
	hasFn    func(e *Element) bool
	getValFn func(e *Element) string
}{
	"local-name":       {nil, (*Element).name},
	"name":             {nil, (*Element).FullTag},
	"namespace-prefix": {nil, (*Element).namespacePrefix},
	"namespace-uri":    {nil, (*Element).NamespaceURI},
	"text":             {(*Element).hasText, (*Element).Text},
}
|
||||
|
||||
// parseFilter parses a path filter contained within [brackets] and
// returns the corresponding filter implementation. On a malformed or
// unrecognized filter, c.err is set and nil is returned. The branch
// order is significant: value-comparison forms (containing ='...') are
// recognized before the bare forms.
func (c *compiler) parseFilter(path string) filter {
	if len(path) == 0 {
		c.err = ErrPath("path contains an empty filter expression.")
		return nil
	}

	// Filter contains [@attr='val'], [fn()='val'], or [tag='val']?
	eqindex := strings.Index(path, "='")
	if eqindex >= 0 {
		// The closing quote must be the final character of the filter.
		rindex := nextIndex(path, "'", eqindex+2)
		if rindex != len(path)-1 {
			c.err = ErrPath("path has mismatched filter quotes.")
			return nil
		}

		key := path[:eqindex]
		value := path[eqindex+2 : rindex]

		switch {
		case key[0] == '@':
			// [@attr='val']: match on attribute value.
			return newFilterAttrVal(key[1:], value)
		case strings.HasSuffix(key, "()"):
			// [fn()='val']: match on a function's string result.
			fn := key[:len(key)-2]
			if t, ok := fnTable[fn]; ok && t.getValFn != nil {
				return newFilterFuncVal(t.getValFn, value)
			}
			c.err = ErrPath("path has unknown function " + fn)
			return nil
		default:
			// [tag='val']: match on a child element's text.
			return newFilterChildText(key, value)
		}
	}

	// Filter contains [@attr], [N], [tag] or [fn()]
	switch {
	case path[0] == '@':
		// [@attr]: match on attribute presence.
		return newFilterAttr(path[1:])
	case strings.HasSuffix(path, "()"):
		// [fn()]: match on a boolean function.
		fn := path[:len(path)-2]
		if t, ok := fnTable[fn]; ok && t.hasFn != nil {
			return newFilterFunc(t.hasFn)
		}
		c.err = ErrPath("path has unknown function " + fn)
		return nil
	case isInteger(path):
		// [N]: positional filter. Positive positions are 1-based in the
		// path syntax, so convert to a 0-based index; non-positive values
		// are passed through (negative values index from the end).
		pos, _ := strconv.Atoi(path)
		switch {
		case pos > 0:
			return newFilterPos(pos - 1)
		default:
			return newFilterPos(pos)
		}
	default:
		// [tag]: match on child element presence.
		return newFilterChild(path)
	}
}
|
||||
|
||||
// selectSelf selects the current element into the candidate list.
// It implements the "." path selector.
type selectSelf struct{}

// apply appends the element itself to the candidate list.
func (s *selectSelf) apply(e *Element, p *pather) {
	p.candidates = append(p.candidates, e)
}
|
||||
|
||||
// selectRoot selects the element's root node.
|
||||
type selectRoot struct{}
|
||||
|
||||
func (s *selectRoot) apply(e *Element, p *pather) {
|
||||
root := e
|
||||
for root.parent != nil {
|
||||
root = root.parent
|
||||
}
|
||||
p.candidates = append(p.candidates, root)
|
||||
}
|
||||
|
||||
// selectParent selects the element's parent into the candidate list.
|
||||
type selectParent struct{}
|
||||
|
||||
func (s *selectParent) apply(e *Element, p *pather) {
|
||||
if e.parent != nil {
|
||||
p.candidates = append(p.candidates, e.parent)
|
||||
}
|
||||
}
|
||||
|
||||
// selectChildren selects the element's child elements into the
|
||||
// candidate list.
|
||||
type selectChildren struct{}
|
||||
|
||||
func (s *selectChildren) apply(e *Element, p *pather) {
|
||||
for _, c := range e.Child {
|
||||
if c, ok := c.(*Element); ok {
|
||||
p.candidates = append(p.candidates, c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// selectDescendants selects all descendant child elements
|
||||
// of the element into the candidate list.
|
||||
type selectDescendants struct{}
|
||||
|
||||
func (s *selectDescendants) apply(e *Element, p *pather) {
|
||||
var queue fifo
|
||||
for queue.add(e); queue.len() > 0; {
|
||||
e := queue.remove().(*Element)
|
||||
p.candidates = append(p.candidates, e)
|
||||
for _, c := range e.Child {
|
||||
if c, ok := c.(*Element); ok {
|
||||
queue.add(c)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// selectChildrenByTag selects into the candidate list all child
|
||||
// elements of the element having the specified tag.
|
||||
type selectChildrenByTag struct {
|
||||
space, tag string
|
||||
}
|
||||
|
||||
func newSelectChildrenByTag(path string) *selectChildrenByTag {
|
||||
s, l := spaceDecompose(path)
|
||||
return &selectChildrenByTag{s, l}
|
||||
}
|
||||
|
||||
func (s *selectChildrenByTag) apply(e *Element, p *pather) {
|
||||
for _, c := range e.Child {
|
||||
if c, ok := c.(*Element); ok && spaceMatch(s.space, c.Space) && s.tag == c.Tag {
|
||||
p.candidates = append(p.candidates, c)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// filterPos filters the candidate list, keeping only the
|
||||
// candidate at the specified index.
|
||||
type filterPos struct {
|
||||
index int
|
||||
}
|
||||
|
||||
func newFilterPos(pos int) *filterPos {
|
||||
return &filterPos{pos}
|
||||
}
|
||||
|
||||
func (f *filterPos) apply(p *pather) {
|
||||
if f.index >= 0 {
|
||||
if f.index < len(p.candidates) {
|
||||
p.scratch = append(p.scratch, p.candidates[f.index])
|
||||
}
|
||||
} else {
|
||||
if -f.index <= len(p.candidates) {
|
||||
p.scratch = append(p.scratch, p.candidates[len(p.candidates)+f.index])
|
||||
}
|
||||
}
|
||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
||||
}
|
||||
|
||||
// filterAttr filters the candidate list for elements having
|
||||
// the specified attribute.
|
||||
type filterAttr struct {
|
||||
space, key string
|
||||
}
|
||||
|
||||
func newFilterAttr(str string) *filterAttr {
|
||||
s, l := spaceDecompose(str)
|
||||
return &filterAttr{s, l}
|
||||
}
|
||||
|
||||
func (f *filterAttr) apply(p *pather) {
|
||||
for _, c := range p.candidates {
|
||||
for _, a := range c.Attr {
|
||||
if spaceMatch(f.space, a.Space) && f.key == a.Key {
|
||||
p.scratch = append(p.scratch, c)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
||||
}
|
||||
|
||||
// filterAttrVal filters the candidate list for elements having
|
||||
// the specified attribute with the specified value.
|
||||
type filterAttrVal struct {
|
||||
space, key, val string
|
||||
}
|
||||
|
||||
func newFilterAttrVal(str, value string) *filterAttrVal {
|
||||
s, l := spaceDecompose(str)
|
||||
return &filterAttrVal{s, l, value}
|
||||
}
|
||||
|
||||
func (f *filterAttrVal) apply(p *pather) {
|
||||
for _, c := range p.candidates {
|
||||
for _, a := range c.Attr {
|
||||
if spaceMatch(f.space, a.Space) && f.key == a.Key && f.val == a.Value {
|
||||
p.scratch = append(p.scratch, c)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
||||
}
|
||||
|
||||
// filterFunc filters the candidate list for elements satisfying a custom
|
||||
// boolean function.
|
||||
type filterFunc struct {
|
||||
fn func(e *Element) bool
|
||||
}
|
||||
|
||||
func newFilterFunc(fn func(e *Element) bool) *filterFunc {
|
||||
return &filterFunc{fn}
|
||||
}
|
||||
|
||||
func (f *filterFunc) apply(p *pather) {
|
||||
for _, c := range p.candidates {
|
||||
if f.fn(c) {
|
||||
p.scratch = append(p.scratch, c)
|
||||
}
|
||||
}
|
||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
||||
}
|
||||
|
||||
// filterFuncVal filters the candidate list for elements containing a value
|
||||
// matching the result of a custom function.
|
||||
type filterFuncVal struct {
|
||||
fn func(e *Element) string
|
||||
val string
|
||||
}
|
||||
|
||||
func newFilterFuncVal(fn func(e *Element) string, value string) *filterFuncVal {
|
||||
return &filterFuncVal{fn, value}
|
||||
}
|
||||
|
||||
func (f *filterFuncVal) apply(p *pather) {
|
||||
for _, c := range p.candidates {
|
||||
if f.fn(c) == f.val {
|
||||
p.scratch = append(p.scratch, c)
|
||||
}
|
||||
}
|
||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
||||
}
|
||||
|
||||
// filterChild filters the candidate list for elements having
|
||||
// a child element with the specified tag.
|
||||
type filterChild struct {
|
||||
space, tag string
|
||||
}
|
||||
|
||||
func newFilterChild(str string) *filterChild {
|
||||
s, l := spaceDecompose(str)
|
||||
return &filterChild{s, l}
|
||||
}
|
||||
|
||||
func (f *filterChild) apply(p *pather) {
|
||||
for _, c := range p.candidates {
|
||||
for _, cc := range c.Child {
|
||||
if cc, ok := cc.(*Element); ok &&
|
||||
spaceMatch(f.space, cc.Space) &&
|
||||
f.tag == cc.Tag {
|
||||
p.scratch = append(p.scratch, c)
|
||||
}
|
||||
}
|
||||
}
|
||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
||||
}
|
||||
|
||||
// filterChildText filters the candidate list for elements having
|
||||
// a child element with the specified tag and text.
|
||||
type filterChildText struct {
|
||||
space, tag, text string
|
||||
}
|
||||
|
||||
func newFilterChildText(str, text string) *filterChildText {
|
||||
s, l := spaceDecompose(str)
|
||||
return &filterChildText{s, l, text}
|
||||
}
|
||||
|
||||
func (f *filterChildText) apply(p *pather) {
|
||||
for _, c := range p.candidates {
|
||||
for _, cc := range c.Child {
|
||||
if cc, ok := cc.(*Element); ok &&
|
||||
spaceMatch(f.space, cc.Space) &&
|
||||
f.tag == cc.Tag &&
|
||||
f.text == cc.Text() {
|
||||
p.scratch = append(p.scratch, c)
|
||||
}
|
||||
}
|
||||
}
|
||||
p.candidates, p.scratch = p.scratch, p.candidates[0:0]
|
||||
}
|
||||
4
vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
generated
vendored
Normal file
4
vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
.vscode
|
||||
.idea
|
||||
*.swp
|
||||
jv
|
||||
175
vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
generated
vendored
Normal file
175
vendor/github.com/santhosh-tekuri/jsonschema/v5/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
215
vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
generated
vendored
Normal file
215
vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,215 @@
|
|||
# jsonschema v5.1.1
|
||||
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
[](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
[](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
|
||||
[](https://codecov.io/github/santhosh-tekuri/jsonschema?branch=master)
|
||||
|
||||
Package jsonschema provides json-schema compilation and validation.
|
||||
|
||||
[Benchmarks](https://dev.to/vearutop/benchmarking-correctness-and-performance-of-go-json-schema-validators-3247)
|
||||
|
||||
### Features:
|
||||
- implements
|
||||
[draft 2020-12](https://json-schema.org/specification-links.html#2020-12),
|
||||
[draft 2019-09](https://json-schema.org/specification-links.html#draft-2019-09-formerly-known-as-draft-8),
|
||||
[draft-7](https://json-schema.org/specification-links.html#draft-7),
|
||||
[draft-6](https://json-schema.org/specification-links.html#draft-6),
|
||||
[draft-4](https://json-schema.org/specification-links.html#draft-4)
|
||||
- fully compliant with [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite), (excluding some optional)
|
||||
- list of optional tests that are excluded can be found in schema_test.go(variable [skipTests](https://github.com/santhosh-tekuri/jsonschema/blob/master/schema_test.go#L24))
|
||||
- validates schemas against meta-schema
|
||||
- full support of remote references
|
||||
- support of recursive references between schemas
|
||||
- detects infinite loop in schemas
|
||||
- thread safe validation
|
||||
- rich, intuitive hierarchical error messages with json-pointers to exact location
|
||||
- supports output formats flag, basic and detailed
|
||||
- supports enabling format and content Assertions in draft2019-09 or above
|
||||
- change `Compiler.AssertFormat`, `Compiler.AssertContent` to `true`
|
||||
- compiled schema can be introspected. easier to develop tools like generating go structs given schema
|
||||
- supports user-defined keywords via [extensions](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-Extension)
|
||||
- implements following formats (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedFormat))
|
||||
- date-time, date, time, duration, period (supports leap-second)
|
||||
- uuid, hostname, email
|
||||
- ip-address, ipv4, ipv6
|
||||
- uri, uriref, uri-template(limited validation)
|
||||
- json-pointer, relative-json-pointer
|
||||
- regex, format
|
||||
- implements following contentEncoding (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
|
||||
- base64
|
||||
- implements following contentMediaType (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
|
||||
- application/json
|
||||
- can load from files/http/https/[string](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-FromString)/[]byte/io.Reader (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedLoader))
|
||||
|
||||
|
||||
see examples in [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
|
||||
The schema is compiled against the version specified in `$schema` property.
|
||||
If the "$schema" property is missing, it uses the latest draft that is currently implemented
|
||||
by this library.
|
||||
|
||||
You can force to use specific version, when `$schema` is missing, as follows:
|
||||
|
||||
```go
|
||||
compiler := jsonschema.NewCompiler()
|
||||
compiler.Draft = jsonschema.Draft4
|
||||
```
|
||||
|
||||
This package supports loading json-schema from filePath and fileURL.
|
||||
|
||||
To load json-schema from HTTPURL, add following import:
|
||||
|
||||
```go
|
||||
import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
```
|
||||
|
||||
## Rich Errors
|
||||
|
||||
The ValidationError returned by Validate method contains detailed context to understand why and where the error is.
|
||||
|
||||
schema.json:
|
||||
```json
|
||||
{
|
||||
"$ref": "t.json#/definitions/employee"
|
||||
}
|
||||
```
|
||||
|
||||
t.json:
|
||||
```json
|
||||
{
|
||||
"definitions": {
|
||||
"employee": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
doc.json:
|
||||
```json
|
||||
1
|
||||
```
|
||||
|
||||
assuming `err` is the ValidationError returned when `doc.json` validated with `schema.json`,
|
||||
```go
|
||||
fmt.Printf("%#v\n", err) // using %#v prints errors hierarchy
|
||||
```
|
||||
Prints:
|
||||
```
|
||||
[I#] [S#] doesn't validate with file:///Users/santhosh/jsonschema/schema.json#
|
||||
[I#] [S#/$ref] doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'
|
||||
[I#] [S#/definitions/employee/type] expected string, but got number
|
||||
```
|
||||
|
||||
Here `I` stands for instance document and `S` stands for schema document.
|
||||
The json-fragments that caused error in instance and schema documents are represented using json-pointer notation.
|
||||
Nested causes are printed with indent.
|
||||
|
||||
To output `err` in `flag` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.FlagOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false
|
||||
}
|
||||
```
|
||||
To output `err` in `basic` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.BasicOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false,
|
||||
"errors": [
|
||||
{
|
||||
"keywordLocation": "",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
|
||||
"instanceLocation": "",
|
||||
"error": "doesn't validate with file:///Users/santhosh/jsonschema/schema.json#"
|
||||
},
|
||||
{
|
||||
"keywordLocation": "/$ref",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
|
||||
"instanceLocation": "",
|
||||
"error": "doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'"
|
||||
},
|
||||
{
|
||||
"keywordLocation": "/$ref/type",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
|
||||
"instanceLocation": "",
|
||||
"error": "expected string, but got number"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
To output `err` in `detailed` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.DetailedOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
|
||||
"instanceLocation": "",
|
||||
"errors": [
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "/$ref",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
|
||||
"instanceLocation": "",
|
||||
"errors": [
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "/$ref/type",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
|
||||
"instanceLocation": "",
|
||||
"error": "expected string, but got number"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## CLI
|
||||
|
||||
to install `go install github.com/santhosh-tekuri/jsonschema/v5/cmd/jv@latest`
|
||||
|
||||
```bash
|
||||
jv [-draft INT] [-output FORMAT] [-assertformat] [-assertcontent] <json-schema> [<json-doc>]...
|
||||
-assertcontent
|
||||
enable content assertions with draft >= 2019
|
||||
-assertformat
|
||||
enable format assertions with draft >= 2019
|
||||
-draft int
|
||||
draft used when '$schema' attribute is missing. valid values 4, 5, 7, 2019, 2020 (default 2020)
|
||||
-output string
|
||||
output format. valid values flag, basic, detailed
|
||||
```
|
||||
|
||||
if no `<json-doc>` arguments are passed, it simply validates the `<json-schema>`.
|
||||
if `$schema` attribute is missing in schema, it uses latest version. this can be overridden by passing `-draft` flag
|
||||
|
||||
exit-code is 1, if there are any validation errors
|
||||
|
||||
## Validating YAML Documents
|
||||
|
||||
since yaml supports non-string keys, such yaml documents are rendered as invalid json documents.
|
||||
yaml parser returns `map[interface{}]interface{}` for object, whereas json parser returns `map[string]interface{}`.
|
||||
this package accepts only `map[string]interface{}`, so we need to manually convert them to `map[string]interface{}`
|
||||
|
||||
https://play.golang.org/p/Hhax3MrtD8r
|
||||
|
||||
the above example shows how to validate yaml document with jsonschema.
|
||||
the conversion explained above is implemented by `toStringKeys` function
|
||||
|
||||
771
vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
generated
vendored
Normal file
771
vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
generated
vendored
Normal file
|
|
@ -0,0 +1,771 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/big"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Compiler represents a json-schema compiler.
type Compiler struct {
	// Draft represents the draft used when '$schema' attribute is missing.
	//
	// This defaults to latest supported draft (currently 2020-12).
	Draft *Draft

	// resources caches loaded resources, keyed by their absolute url.
	resources map[string]*resource

	// extensions holds the compiler extensions registered on this
	// Compiler, keyed by extension name (see RegisterExtension).
	extensions map[string]extension

	// ExtractAnnotations tells whether schema annotations has to be extracted
	// in compiled Schema or not.
	ExtractAnnotations bool

	// LoadURL loads the document at given absolute URL.
	//
	// If nil, package global LoadURL is used.
	LoadURL func(s string) (io.ReadCloser, error)

	// AssertFormat for specifications >= draft2019-09.
	AssertFormat bool

	// AssertContent for specifications >= draft2019-09.
	AssertContent bool
}
|
||||
|
||||
// Compile parses json-schema at given url returns, if successful,
|
||||
// a Schema object that can be used to match against json.
|
||||
//
|
||||
// Returned error can be *SchemaError
|
||||
func Compile(url string) (*Schema, error) {
|
||||
return NewCompiler().Compile(url)
|
||||
}
|
||||
|
||||
// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
|
||||
// It simplifies safe initialization of global variables holding compiled Schemas.
|
||||
func MustCompile(url string) *Schema {
|
||||
return NewCompiler().MustCompile(url)
|
||||
}
|
||||
|
||||
// CompileString parses and compiles the given schema with given base url.
|
||||
func CompileString(url, schema string) (*Schema, error) {
|
||||
c := NewCompiler()
|
||||
if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.Compile(url)
|
||||
}
|
||||
|
||||
// MustCompileString is like CompileString but panics on error.
// It simplifies safe initialization of global variables holding compiled Schema.
func MustCompileString(url, schema string) *Schema {
	c := NewCompiler()
	if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
		panic(err)
	}
	return c.MustCompile(url)
}
|
||||
|
||||
// NewCompiler returns a json-schema Compiler object.
// If the '$schema' attribute is missing, the latest supported draft is
// assumed (see Compiler.Draft); set Compiler.Draft to change this behavior.
func NewCompiler() *Compiler {
	return &Compiler{Draft: latest, resources: make(map[string]*resource), extensions: make(map[string]extension)}
}
|
||||
|
||||
// AddResource adds in-memory resource to the compiler.
//
// Note that url must not have fragment
func (c *Compiler) AddResource(url string, r io.Reader) error {
	res, err := newResource(url, r)
	if err != nil {
		return err
	}
	// cache under the resource's own (normalized) url, not the caller's
	c.resources[res.url] = res
	return nil
}
|
||||
|
||||
// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
// It simplifies safe initialization of global variables holding compiled Schemas.
func (c *Compiler) MustCompile(url string) *Schema {
	s, err := c.Compile(url)
	if err != nil {
		// %#v prints the full error hierarchy, not just the top message
		panic(fmt.Sprintf("jsonschema: %#v", err))
	}
	return s
}
|
||||
|
||||
// Compile parses json-schema at given url returns, if successful,
// a Schema object that can be used to match against json.
//
// error returned will be of type *SchemaError
func (c *Compiler) Compile(url string) (*Schema, error) {
	// make url absolute
	u, err := toAbs(url)
	if err != nil {
		return nil, &SchemaError{url, err}
	}
	url = u

	sch, err := c.compileURL(url, nil, "#")
	if err != nil {
		// wrap any compilation failure with the url it originated from
		err = &SchemaError{url, err}
	}
	return sch, err
}
|
||||
|
||||
// findResource returns the (initialized) resource for the given absolute
// url, loading and caching it on first use.
//
// Loading: built-in vocab schemas are consulted first, then
// Compiler.LoadURL (falling back to the package-level LoadURL). After
// loading, the resource's draft is resolved from its "$schema" property
// (defaulting to c.Draft), its base url is updated via the draft's
// id-resolution, and its subschemas are indexed.
func (c *Compiler) findResource(url string) (*resource, error) {
	if _, ok := c.resources[url]; !ok {
		// load resource
		var rdr io.Reader
		if sch, ok := vocabSchemas[url]; ok {
			rdr = strings.NewReader(sch)
		} else {
			loadURL := LoadURL
			if c.LoadURL != nil {
				loadURL = c.LoadURL
			}
			r, err := loadURL(url)
			if err != nil {
				return nil, err
			}
			defer r.Close()
			rdr = r
		}
		if err := c.AddResource(url, rdr); err != nil {
			return nil, err
		}
	}

	r := c.resources[url]
	if r.draft != nil {
		// already initialized on a previous call
		return r, nil
	}

	// set draft
	r.draft = c.Draft
	if m, ok := r.doc.(map[string]interface{}); ok {
		if sch, ok := m["$schema"]; ok {
			sch, ok := sch.(string)
			if !ok {
				return nil, fmt.Errorf("jsonschema: invalid $schema in %s", url)
			}
			if !isURI(sch) {
				return nil, fmt.Errorf("jsonschema: $schema must be uri in %s", url)
			}
			r.draft = findDraft(sch)
			if r.draft == nil {
				// not a known draft url: inherit the draft of the
				// referenced meta-schema (loaded recursively)
				sch, _ := split(sch)
				if sch == url {
					// meta-schema references itself but is not a known draft
					return nil, fmt.Errorf("jsonschema: unsupported draft in %s", url)
				}
				mr, err := c.findResource(sch)
				if err != nil {
					return nil, err
				}
				r.draft = mr.draft
			}
		}
	}

	id, err := r.draft.resolveID(r.url, r.doc)
	if err != nil {
		return nil, err
	}
	if id != "" {
		// the document declares its own id; it becomes the base url
		r.url = id
	}

	if err := r.fillSubschemas(c, r); err != nil {
		return nil, err
	}

	return r, nil
}
|
||||
|
||||
// compileURL compiles the schema identified by url (which may carry a
// fragment). If the url names a supported draft, that draft's meta-schema
// is returned directly without loading anything.
func (c *Compiler) compileURL(url string, stack []schemaRef, ptr string) (*Schema, error) {
	// if url points to a draft, return Draft.meta
	if d := findDraft(url); d != nil && d.meta != nil {
		return d.meta, nil
	}

	b, f := split(url)
	r, err := c.findResource(b)
	if err != nil {
		return nil, err
	}
	return c.compileRef(r, stack, ptr, r, f)
}
|
||||
|
||||
// compileRef resolves ref (relative to res within resource r) and compiles
// the schema it targets. refPtr is the relative json-pointer of the
// keyword holding the reference; it is used for loop detection and error
// locations. References into other resources are forwarded to compileURL.
func (c *Compiler) compileRef(r *resource, stack []schemaRef, refPtr string, res *resource, ref string) (*Schema, error) {
	base := r.baseURL(res.floc)
	ref, err := resolveURL(base, ref)
	if err != nil {
		return nil, err
	}

	u, f := split(ref)
	sr := r.findResource(u)
	if sr == nil {
		// external resource
		return c.compileURL(ref, stack, refPtr)
	}

	// ensure root resource is always compiled first.
	// this is required to get schema.meta from root resource
	if r.schema == nil {
		r.schema = newSchema(r.url, r.floc, r.doc)
		if _, err := c.compile(r, nil, schemaRef{"#", r.schema, false}, r); err != nil {
			return nil, err
		}
	}

	sr, err = r.resolveFragment(c, sr, f)
	if err != nil {
		return nil, err
	}
	if sr == nil {
		return nil, fmt.Errorf("jsonschema: %s not found", ref)
	}

	// already compiled: only guard against reference cycles
	if sr.schema != nil {
		if err := checkLoop(stack, schemaRef{refPtr, sr.schema, false}); err != nil {
			return nil, err
		}
		return sr.schema, nil
	}

	// assign the schema before compiling so self-references terminate
	sr.schema = newSchema(r.url, sr.floc, sr.doc)
	return c.compile(r, stack, schemaRef{refPtr, sr.schema, false}, sr)
}
|
||||
|
||||
// compileDynamicAnchors pre-compiles every subresource of res that declares
// a "$dynamicAnchor", recording the compiled schemas in
// res.schema.dynamicAnchors. It is a no-op for drafts before 2020-12.
func (c *Compiler) compileDynamicAnchors(r *resource, res *resource) error {
	if r.draft.version < 2020 {
		return nil
	}

	rr := r.listResources(res)
	rr = append(rr, res)
	for _, sr := range rr {
		if m, ok := sr.doc.(map[string]interface{}); ok {
			if _, ok := m["$dynamicAnchor"]; ok {
				// refPtr is irrelevant here; the ref target is sr itself
				sch, err := c.compileRef(r, nil, "IGNORED", r, sr.floc)
				if err != nil {
					return err
				}
				res.schema.dynamicAnchors = append(res.schema.dynamicAnchors, sch)
			}
		}
	}
	return nil
}
|
||||
|
||||
// compile compiles res.doc into res.schema. Boolean schemas are handled
// directly here (via Schema.Always); object schemas are delegated to
// compileMap.
func (c *Compiler) compile(r *resource, stack []schemaRef, sref schemaRef, res *resource) (*Schema, error) {
	if err := c.compileDynamicAnchors(r, res); err != nil {
		return nil, err
	}

	switch v := res.doc.(type) {
	case bool:
		// boolean schema: stored as-is in Always
		res.schema.Always = &v
		return res.schema, nil
	default:
		return res.schema, c.compileMap(r, stack, sref, res)
	}
}
|
||||
|
||||
// compileMap compiles an object-valued schema document (res.doc) into
// res.schema. Keywords are processed in groups gated by the resource's
// draft version and (for 2019-09+) by the vocabularies declared by the
// meta-schema, so keywords not applicable to the draft are skipped.
func (c *Compiler) compileMap(r *resource, stack []schemaRef, sref schemaRef, res *resource) error {
	m := res.doc.(map[string]interface{})

	if err := checkLoop(stack, sref); err != nil {
		return err
	}
	stack = append(stack, sref)

	var s = res.schema
	var err error

	if r == res { // root schema
		// resolve the meta-schema ($schema) for the root resource
		if sch, ok := m["$schema"]; ok {
			sch := sch.(string)
			if d := findDraft(sch); d != nil {
				s.meta = d.meta
			} else {
				if s.meta, err = c.compileRef(r, stack, "$schema", res, sch); err != nil {
					return err
				}
			}
		}
	}

	if ref, ok := m["$ref"]; ok {
		s.Ref, err = c.compileRef(r, stack, "$ref", res, ref.(string))
		if err != nil {
			return err
		}
		if r.draft.version < 2019 {
			// All other properties in a "$ref" object MUST be ignored
			return nil
		}
	}

	if r.draft.version >= 2019 {
		if r == res { // root schema
			if vocab, ok := m["$vocabulary"]; ok {
				for url := range vocab.(map[string]interface{}) {
					if !r.draft.isVocab(url) {
						return fmt.Errorf("jsonschema: unsupported vocab %q in %s", url, res)
					}
					s.vocab = append(s.vocab, url)
				}
			} else {
				s.vocab = r.draft.defaultVocab
			}
		}

		if ref, ok := m["$recursiveRef"]; ok {
			s.RecursiveRef, err = c.compileRef(r, stack, "$recursiveRef", res, ref.(string))
			if err != nil {
				return err
			}
		}
	}
	if r.draft.version >= 2020 {
		if dref, ok := m["$dynamicRef"]; ok {
			s.DynamicRef, err = c.compileRef(r, stack, "$dynamicRef", res, dref.(string))
			if err != nil {
				return err
			}
		}
	}

	// loadInt reads an integer-valued keyword; -1 means "not present".
	loadInt := func(pname string) int {
		if num, ok := m[pname]; ok {
			i, _ := num.(json.Number).Float64()
			return int(i)
		}
		return -1
	}

	// loadRat reads a numeric keyword as an exact rational; nil means
	// "not present".
	loadRat := func(pname string) *big.Rat {
		if num, ok := m[pname]; ok {
			r, _ := new(big.Rat).SetString(string(num.(json.Number)))
			return r
		}
		return nil
	}

	// "validation" vocabulary keywords
	if r.draft.version < 2019 || r.schema.meta.hasVocab("validation") {
		if t, ok := m["type"]; ok {
			switch t := t.(type) {
			case string:
				s.Types = []string{t}
			case []interface{}:
				s.Types = toStrings(t)
			}
		}

		if e, ok := m["enum"]; ok {
			s.Enum = e.([]interface{})
			allPrimitives := true
			for _, item := range s.Enum {
				switch jsonType(item) {
				case "object", "array":
					allPrimitives = false
					// NOTE(review): this break only exits the switch (a
					// no-op); the loop keeps scanning remaining items.
					// Result is unchanged, just wasted iterations —
					// confirm whether a loop break was intended.
					break
				}
			}
			// pre-compute a friendly error message when all enum values
			// are printable primitives
			s.enumError = "enum failed"
			if allPrimitives {
				if len(s.Enum) == 1 {
					s.enumError = fmt.Sprintf("value must be %#v", s.Enum[0])
				} else {
					strEnum := make([]string, len(s.Enum))
					for i, item := range s.Enum {
						strEnum[i] = fmt.Sprintf("%#v", item)
					}
					s.enumError = fmt.Sprintf("value must be one of %s", strings.Join(strEnum, ", "))
				}
			}
		}

		s.Minimum = loadRat("minimum")
		if exclusive, ok := m["exclusiveMinimum"]; ok {
			// draft-4 uses boolean exclusiveMinimum modifying "minimum";
			// later drafts use a numeric value
			if exclusive, ok := exclusive.(bool); ok {
				if exclusive {
					s.Minimum, s.ExclusiveMinimum = nil, s.Minimum
				}
			} else {
				s.ExclusiveMinimum = loadRat("exclusiveMinimum")
			}
		}

		s.Maximum = loadRat("maximum")
		if exclusive, ok := m["exclusiveMaximum"]; ok {
			// same boolean-vs-numeric handling as exclusiveMinimum
			if exclusive, ok := exclusive.(bool); ok {
				if exclusive {
					s.Maximum, s.ExclusiveMaximum = nil, s.Maximum
				}
			} else {
				s.ExclusiveMaximum = loadRat("exclusiveMaximum")
			}
		}

		s.MultipleOf = loadRat("multipleOf")

		s.MinProperties, s.MaxProperties = loadInt("minProperties"), loadInt("maxProperties")

		if req, ok := m["required"]; ok {
			s.Required = toStrings(req.([]interface{}))
		}

		s.MinItems, s.MaxItems = loadInt("minItems"), loadInt("maxItems")

		if unique, ok := m["uniqueItems"]; ok {
			s.UniqueItems = unique.(bool)
		}

		s.MinLength, s.MaxLength = loadInt("minLength"), loadInt("maxLength")

		if pattern, ok := m["pattern"]; ok {
			s.Pattern = regexp.MustCompile(pattern.(string))
		}

		if r.draft.version >= 2019 {
			s.MinContains, s.MaxContains = loadInt("minContains"), loadInt("maxContains")
			if s.MinContains == -1 {
				// spec default for minContains is 1
				s.MinContains = 1
			}

			if deps, ok := m["dependentRequired"]; ok {
				deps := deps.(map[string]interface{})
				s.DependentRequired = make(map[string][]string, len(deps))
				for pname, pvalue := range deps {
					s.DependentRequired[pname] = toStrings(pvalue.([]interface{}))
				}
			}
		}
	}

	// compile compiles the subschema at ptr (relative to res).
	compile := func(stack []schemaRef, ptr string) (*Schema, error) {
		return c.compileRef(r, stack, ptr, res, r.url+res.floc+"/"+ptr)
	}

	// loadSchema compiles the schema-valued keyword pname, if present.
	loadSchema := func(pname string, stack []schemaRef) (*Schema, error) {
		if _, ok := m[pname]; ok {
			return compile(stack, escape(pname))
		}
		return nil, nil
	}

	// loadSchemas compiles the array-of-schemas keyword pname, if present.
	loadSchemas := func(pname string, stack []schemaRef) ([]*Schema, error) {
		if pvalue, ok := m[pname]; ok {
			pvalue := pvalue.([]interface{})
			schemas := make([]*Schema, len(pvalue))
			for i := range pvalue {
				sch, err := compile(stack, escape(pname)+"/"+strconv.Itoa(i))
				if err != nil {
					return nil, err
				}
				schemas[i] = sch
			}
			return schemas, nil
		}
		return nil, nil
	}

	// "applicator" vocabulary keywords
	if r.draft.version < 2019 || r.schema.meta.hasVocab("applicator") {
		if s.Not, err = loadSchema("not", stack); err != nil {
			return err
		}
		if s.AllOf, err = loadSchemas("allOf", stack); err != nil {
			return err
		}
		if s.AnyOf, err = loadSchemas("anyOf", stack); err != nil {
			return err
		}
		if s.OneOf, err = loadSchemas("oneOf", stack); err != nil {
			return err
		}

		if props, ok := m["properties"]; ok {
			props := props.(map[string]interface{})
			s.Properties = make(map[string]*Schema, len(props))
			for pname := range props {
				s.Properties[pname], err = compile(nil, "properties/"+escape(pname))
				if err != nil {
					return err
				}
			}
		}

		if regexProps, ok := m["regexProperties"]; ok {
			s.RegexProperties = regexProps.(bool)
		}

		if patternProps, ok := m["patternProperties"]; ok {
			patternProps := patternProps.(map[string]interface{})
			s.PatternProperties = make(map[*regexp.Regexp]*Schema, len(patternProps))
			for pattern := range patternProps {
				s.PatternProperties[regexp.MustCompile(pattern)], err = compile(nil, "patternProperties/"+escape(pattern))
				if err != nil {
					return err
				}
			}
		}

		if additionalProps, ok := m["additionalProperties"]; ok {
			// additionalProperties may be a boolean or a schema
			switch additionalProps := additionalProps.(type) {
			case bool:
				s.AdditionalProperties = additionalProps
			case map[string]interface{}:
				s.AdditionalProperties, err = compile(nil, "additionalProperties")
				if err != nil {
					return err
				}
			}
		}

		if deps, ok := m["dependencies"]; ok {
			deps := deps.(map[string]interface{})
			s.Dependencies = make(map[string]interface{}, len(deps))
			for pname, pvalue := range deps {
				// each dependency is either a list of required property
				// names or a schema
				switch pvalue := pvalue.(type) {
				case []interface{}:
					s.Dependencies[pname] = toStrings(pvalue)
				default:
					s.Dependencies[pname], err = compile(stack, "dependencies/"+escape(pname))
					if err != nil {
						return err
					}
				}
			}
		}

		if r.draft.version >= 6 {
			if s.PropertyNames, err = loadSchema("propertyNames", nil); err != nil {
				return err
			}
			if s.Contains, err = loadSchema("contains", nil); err != nil {
				return err
			}
		}

		if r.draft.version >= 7 {
			// then/else are only meaningful when "if" is present
			if m["if"] != nil {
				if s.If, err = loadSchema("if", stack); err != nil {
					return err
				}
				if s.Then, err = loadSchema("then", stack); err != nil {
					return err
				}
				if s.Else, err = loadSchema("else", stack); err != nil {
					return err
				}
			}
		}
		if r.draft.version >= 2019 {
			if deps, ok := m["dependentSchemas"]; ok {
				deps := deps.(map[string]interface{})
				s.DependentSchemas = make(map[string]*Schema, len(deps))
				for pname := range deps {
					s.DependentSchemas[pname], err = compile(stack, "dependentSchemas/"+escape(pname))
					if err != nil {
						return err
					}
				}
			}
		}

		if r.draft.version >= 2020 {
			// 2020-12 replaced array-form "items" with prefixItems
			if s.PrefixItems, err = loadSchemas("prefixItems", nil); err != nil {
				return err
			}
			if s.Items2020, err = loadSchema("items", nil); err != nil {
				return err
			}
		} else {
			if items, ok := m["items"]; ok {
				switch items.(type) {
				case []interface{}:
					s.Items, err = loadSchemas("items", nil)
					if err != nil {
						return err
					}
					// additionalItems only applies to array-form items
					if additionalItems, ok := m["additionalItems"]; ok {
						switch additionalItems := additionalItems.(type) {
						case bool:
							s.AdditionalItems = additionalItems
						case map[string]interface{}:
							s.AdditionalItems, err = compile(nil, "additionalItems")
							if err != nil {
								return err
							}
						}
					}
				default:
					s.Items, err = compile(nil, "items")
					if err != nil {
						return err
					}
				}
			}
		}

	}

	// unevaluatedXXX keywords were in "applicator" vocab in 2019, but moved to new vocab "unevaluated" in 2020
	if (r.draft.version == 2019 && r.schema.meta.hasVocab("applicator")) || (r.draft.version >= 2020 && r.schema.meta.hasVocab("unevaluated")) {
		if s.UnevaluatedProperties, err = loadSchema("unevaluatedProperties", nil); err != nil {
			return err
		}
		if s.UnevaluatedItems, err = loadSchema("unevaluatedItems", nil); err != nil {
			return err
		}
		if r.draft.version >= 2020 {
			// any item in an array that passes validation of the contains schema is considered "evaluated"
			s.ContainsEval = true
		}
	}

	if format, ok := m["format"]; ok {
		s.Format = format.(string)
		// formats assert only in older drafts, or when explicitly enabled
		if r.draft.version < 2019 || c.AssertFormat || r.schema.meta.hasVocab("format-assertion") {
			s.format, _ = Formats[s.Format]
		}
	}

	if c.ExtractAnnotations {
		if title, ok := m["title"]; ok {
			s.Title = title.(string)
		}
		if description, ok := m["description"]; ok {
			s.Description = description.(string)
		}
		s.Default = m["default"]
	}

	if r.draft.version >= 6 {
		if c, ok := m["const"]; ok {
			// const is stored as a single-element enum
			s.Constant = []interface{}{c}
		}
	}

	if r.draft.version >= 7 {
		if encoding, ok := m["contentEncoding"]; ok {
			s.ContentEncoding = encoding.(string)
			s.decoder, _ = Decoders[s.ContentEncoding]
		}
		if mediaType, ok := m["contentMediaType"]; ok {
			s.ContentMediaType = mediaType.(string)
			s.mediaType, _ = MediaTypes[s.ContentMediaType]
			if s.ContentSchema, err = loadSchema("contentSchema", stack); err != nil {
				return err
			}
		}
		if c.ExtractAnnotations {
			if comment, ok := m["$comment"]; ok {
				s.Comment = comment.(string)
			}
			if readOnly, ok := m["readOnly"]; ok {
				s.ReadOnly = readOnly.(bool)
			}
			if writeOnly, ok := m["writeOnly"]; ok {
				s.WriteOnly = writeOnly.(bool)
			}
			if examples, ok := m["examples"]; ok {
				s.Examples = examples.([]interface{})
			}
		}
	}

	if r.draft.version >= 2019 {
		// content keywords are annotation-only in 2019-09+ unless
		// AssertContent is enabled
		if !c.AssertContent {
			s.decoder = nil
			s.mediaType = nil
			s.ContentSchema = nil
		}
		if c.ExtractAnnotations {
			if deprecated, ok := m["deprecated"]; ok {
				s.Deprecated = deprecated.(bool)
			}
		}
	}

	// give every registered extension a chance to compile its keywords
	for name, ext := range c.extensions {
		es, err := ext.compiler.Compile(CompilerContext{c, r, stack, res}, m)
		if err != nil {
			return err
		}
		if es != nil {
			if s.Extensions == nil {
				s.Extensions = make(map[string]ExtSchema)
			}
			s.Extensions[name] = es
		}
	}

	return nil
}
|
||||
|
||||
// validateSchema validates the document fragment v (located at vloc)
// against the resource's draft meta-schema and against the meta-schema of
// every registered extension. A nil meta-schema is skipped.
func (c *Compiler) validateSchema(r *resource, v interface{}, vloc string) error {
	validate := func(meta *Schema) error {
		if meta == nil {
			return nil
		}
		return meta.validateValue(v, vloc)
	}

	if err := validate(r.draft.meta); err != nil {
		return err
	}
	for _, ext := range c.extensions {
		if err := validate(ext.meta); err != nil {
			return err
		}
	}
	return nil
}
|
||||
|
||||
// toStrings converts a slice of interface{} values, each holding a string,
// into a []string. It panics if any element is not a string.
func toStrings(arr []interface{}) []string {
	out := make([]string, 0, len(arr))
	for _, v := range arr {
		out = append(out, v.(string))
	}
	return out
}
|
||||
|
||||
// schemaRef captures a schema and the path referring to it; a stack of
// schemaRefs is used during compilation and validation to detect loops.
type schemaRef struct {
	path string // relative-json-pointer to schema
	schema *Schema // target schema
	discard bool // true when scope left
}
|
||||
|
||||
// String renders the ref as "(path)schema", used in debug/error output.
func (sr schemaRef) String() string {
	return fmt.Sprintf("(%s)%v", sr.path, sr.schema)
}
|
||||
|
||||
// checkLoop returns an InfiniteLoopError if sref's schema already appears
// anywhere on the given stack (pointer identity), nil otherwise.
func checkLoop(stack []schemaRef, sref schemaRef) error {
	for _, ref := range stack {
		if ref.schema == sref.schema {
			return infiniteLoopError(stack, sref)
		}
	}
	return nil
}
|
||||
|
||||
func keywordLocation(stack []schemaRef, path string) string {
|
||||
var loc string
|
||||
for _, ref := range stack[1:] {
|
||||
loc += "/" + ref.path
|
||||
}
|
||||
if path != "" {
|
||||
loc = loc + "/" + path
|
||||
}
|
||||
return loc
|
||||
}
|
||||
29
vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
generated
vendored
Normal file
29
vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
generated
vendored
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// Decoders is a registry of functions, which know how to decode
// string encoded in specific format.
//
// New Decoders can be registered by adding to this map. Key is encoding name,
// value is function that knows how to decode string in that format.
// Only "base64" is registered out of the box.
var Decoders = map[string]func(string) ([]byte, error){
	"base64": base64.StdEncoding.DecodeString,
}
|
||||
|
||||
// MediaTypes is a registry of functions, which know how to validate
// whether the bytes represent data of that mediaType.
//
// New mediaTypes can be registered by adding to this map. Key is mediaType name,
// value is function that knows how to validate that mediaType.
// Only "application/json" is registered out of the box.
var MediaTypes = map[string]func([]byte) error{
	"application/json": validateJSON,
}
|
||||
|
||||
// validateJSON reports whether b is well-formed JSON: it returns nil on
// success and the json.Unmarshal error for malformed input.
func validateJSON(b []byte) error {
	var doc interface{}
	return json.Unmarshal(b, &doc)
}
|
||||
49
vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
generated
vendored
Normal file
49
vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
generated
vendored
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
/*
|
||||
Package jsonschema provides json-schema compilation and validation.
|
||||
|
||||
Features:
|
||||
- implements draft 2020-12, 2019-09, draft-7, draft-6, draft-4
|
||||
- fully compliant with JSON-Schema-Test-Suite, (excluding some optional)
|
||||
- list of optional tests that are excluded can be found in schema_test.go(variable skipTests)
|
||||
- validates schemas against meta-schema
|
||||
- full support of remote references
|
||||
- support of recursive references between schemas
|
||||
- detects infinite loop in schemas
|
||||
- thread safe validation
|
||||
- rich, intuitive hierarchical error messages with json-pointers to exact location
|
||||
- supports output formats flag, basic and detailed
|
||||
- supports enabling format and content Assertions in draft2019-09 or above
|
||||
- change Compiler.AssertFormat, Compiler.AssertContent to true
|
||||
- compiled schema can be introspected. easier to develop tools like generating go structs given schema
|
||||
- supports user-defined keywords via extensions
|
||||
- implements following formats (supports user-defined)
|
||||
- date-time, date, time, duration (supports leap-second)
|
||||
- uuid, hostname, email
|
||||
- ip-address, ipv4, ipv6
|
||||
- uri, uriref, uri-template(limited validation)
|
||||
- json-pointer, relative-json-pointer
|
||||
- regex, format
|
||||
- implements following contentEncoding (supports user-defined)
|
||||
- base64
|
||||
- implements following contentMediaType (supports user-defined)
|
||||
- application/json
|
||||
- can load from files/http/https/string/[]byte/io.Reader (supports user-defined)
|
||||
|
||||
The schema is compiled against the version specified in "$schema" property.
|
||||
If "$schema" property is missing, it uses latest draft which currently implemented
|
||||
by this library.
|
||||
|
||||
You can force to use specific draft, when "$schema" is missing, as follows:
|
||||
|
||||
compiler := jsonschema.NewCompiler()
|
||||
compiler.Draft = jsonschema.Draft4
|
||||
|
||||
This package supports loading json-schema from filePath and fileURL.
|
||||
|
||||
To load json-schema from HTTPURL, add following import:
|
||||
|
||||
import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
|
||||
you can validate yaml documents. see https://play.golang.org/p/sJy1qY7dXgA
|
||||
*/
|
||||
package jsonschema
|
||||
1432
vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
generated
vendored
Normal file
1432
vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
129
vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
generated
vendored
Normal file
129
vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
generated
vendored
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// InvalidJSONTypeError is the error type returned by ValidateInterface.
// It reports that the supplied Go value does not correspond to a valid
// JSON type.
type InvalidJSONTypeError string

// Error implements the error interface.
func (e InvalidJSONTypeError) Error() string {
	return fmt.Sprint("jsonschema: invalid jsonType: ", string(e))
}
|
||||
|
||||
// InfiniteLoopError is returned by Compile/Validate.
// It carries the url#keywordLocation path that led to the infinite loop.
type InfiniteLoopError string

// Error implements the error interface.
func (e InfiniteLoopError) Error() string {
	return "jsonschema: infinite loop " + string(e)
}

// infiniteLoopError builds an InfiniteLoopError describing the cycle:
// the schemas currently on stack followed by the repeated reference sref.
func infiniteLoopError(stack []schemaRef, sref schemaRef) InfiniteLoopError {
	var path string
	for _, ref := range stack {
		if path == "" {
			// the first entry anchors the path at its absolute schema location
			path += ref.schema.Location
		} else {
			// subsequent entries contribute only their relative keyword path
			path += "/" + ref.path
		}
	}
	return InfiniteLoopError(path + "/" + sref.path)
}
|
||||
|
||||
// SchemaError is the error type returned by Compile.
type SchemaError struct {
	// SchemaURL is the url to json-schema that failed to compile.
	// This is helpful, if your schema refers to external schemas
	SchemaURL string

	// Err is the error that occurred during compilation.
	// It could be ValidationError, because compilation validates
	// given schema against the json meta-schema
	Err error
}

// Unwrap returns the underlying compilation error, enabling
// errors.Is/errors.As inspection of a SchemaError.
func (se *SchemaError) Unwrap() error {
	return se.Err
}

// Error implements the error interface. The wrapped error's own
// "jsonschema: " prefix is trimmed so it is not repeated in the output.
func (se *SchemaError) Error() string {
	s := fmt.Sprintf("jsonschema %s compilation failed", se.SchemaURL)
	if se.Err != nil {
		return fmt.Sprintf("%s: %v", s, strings.TrimPrefix(se.Err.Error(), "jsonschema: "))
	}
	return s
}

// GoString renders the full hierarchical form when the wrapped error is a
// ValidationError; otherwise it falls back to Error().
func (se *SchemaError) GoString() string {
	if _, ok := se.Err.(*ValidationError); ok {
		return fmt.Sprintf("jsonschema %s compilation failed\n%#v", se.SchemaURL, se.Err)
	}
	return se.Error()
}
|
||||
|
||||
// ValidationError is the error type returned by Validate.
type ValidationError struct {
	KeywordLocation         string             // validation path of validating keyword or schema
	AbsoluteKeywordLocation string             // absolute location of validating keyword or schema
	InstanceLocation        string             // location of the json value within the instance being validated
	Message                 string             // describes error
	Causes                  []*ValidationError // nested validation errors
}

// add appends causes as children of ve and returns ve.
// Each cause must be a *ValidationError (the type assertion panics otherwise).
func (ve *ValidationError) add(causes ...error) error {
	for _, cause := range causes {
		ve.Causes = append(ve.Causes, cause.(*ValidationError))
	}
	return ve
}

// causes merges err into ve: a message-less error contributes its
// children directly; anything else becomes a single child of ve.
func (ve *ValidationError) causes(err error) error {
	if err := err.(*ValidationError); err.Message == "" {
		ve.Causes = err.Causes
	} else {
		ve.add(err)
	}
	return ve
}

// Error reports the left-most leaf failure (found by following the first
// cause at each level), prefixed with the schema's url and keyword location.
func (ve *ValidationError) Error() string {
	leaf := ve
	for len(leaf.Causes) > 0 {
		leaf = leaf.Causes[0]
	}
	u, _ := split(ve.AbsoluteKeywordLocation)
	return fmt.Sprintf("jsonschema: %s does not validate with %s: %s", quote(leaf.InstanceLocation), u+"#"+leaf.KeywordLocation, leaf.Message)
}

// GoString renders the whole error tree, one indented line per cause.
func (ve *ValidationError) GoString() string {
	sloc := ve.AbsoluteKeywordLocation
	sloc = sloc[strings.IndexByte(sloc, '#')+1:]
	msg := fmt.Sprintf("[I#%s] [S#%s] %s", ve.InstanceLocation, sloc, ve.Message)
	for _, c := range ve.Causes {
		for _, line := range strings.Split(c.GoString(), "\n") {
			msg += "\n  " + line
		}
	}
	return msg
}
|
||||
|
||||
// joinPtr joins two json-pointer fragments with a "/" separator.
// An empty operand yields the other pointer unchanged.
func joinPtr(ptr1, ptr2 string) string {
	switch {
	case ptr1 == "":
		return ptr2
	case ptr2 == "":
		return ptr1
	default:
		return ptr1 + "/" + ptr2
	}
}
|
||||
|
||||
// quote returns s as a single-quoted string: embedded single quotes are
// backslash-escaped while double quotes are left unescaped.
func quote(s string) string {
	q := fmt.Sprintf("%q", s)
	q = q[1 : len(q)-1] // strip the surrounding double quotes
	q = strings.ReplaceAll(q, `\"`, `"`)
	q = strings.ReplaceAll(q, `'`, `\'`)
	return "'" + q + "'"
}
|
||||
116
vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
generated
vendored
Normal file
116
vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
generated
vendored
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
package jsonschema
|
||||
|
||||
// ExtCompiler compiles custom keyword(s) into ExtSchema.
type ExtCompiler interface {
	// Compile compiles the custom keywords in schema m and returns its compiled representation.
	// if the schema m does not contain the keywords defined by this extension,
	// compiled representation nil should be returned.
	Compile(ctx CompilerContext, m map[string]interface{}) (ExtSchema, error)
}

// ExtSchema is schema representation of custom keyword(s)
type ExtSchema interface {
	// Validate validates the json value v with this ExtSchema.
	// Returned error must be *ValidationError.
	Validate(ctx ValidationContext, v interface{}) error
}

// extension pairs the metaschema describing an extension's keywords
// with the compiler that understands them.
type extension struct {
	meta     *Schema     // validates usage of the extension's keywords
	compiler ExtCompiler // compiles those keywords into an ExtSchema
}
|
||||
|
||||
// RegisterExtension registers custom keyword(s) into this compiler.
//
// name is extension name, used only to avoid name collisions.
// meta captures the metaschema for the new keywords.
// This is used to validate the schema before calling ext.Compile.
//
// Registering under an already-used name replaces the earlier extension.
func (c *Compiler) RegisterExtension(name string, meta *Schema, ext ExtCompiler) {
	c.extensions[name] = extension{meta, ext}
}
|
||||
|
||||
// CompilerContext ---

// CompilerContext provides additional context required in compiling for extension.
type CompilerContext struct {
	c     *Compiler   // compiler driving this compilation
	r     *resource   // resource being compiled
	stack []schemaRef // schemas applied on the way to the current location
	res   *resource   // subresource containing the extension's keywords
}

// Compile compiles given value at ptr into *Schema. This is useful in implementing
// keyword like allOf/not/patternProperties.
//
// schPath is the relative-json-pointer to the schema to be compiled from parent schema.
//
// applicableOnSameInstance tells whether current schema and the given schema
// are applied on same instance value. this is used to detect infinite loop in schema.
func (ctx CompilerContext) Compile(schPath string, applicableOnSameInstance bool) (*Schema, error) {
	var stack []schemaRef
	if applicableOnSameInstance {
		// carry the current stack so cycles on the same instance are detected
		stack = ctx.stack
	}
	return ctx.c.compileRef(ctx.r, stack, schPath, ctx.res, ctx.r.url+ctx.res.floc+"/"+schPath)
}

// CompileRef compiles the schema referenced by ref uri
//
// refPath is the relative-json-pointer to ref.
//
// applicableOnSameInstance tells whether current schema and the given schema
// are applied on same instance value. this is used to detect infinite loop in schema.
func (ctx CompilerContext) CompileRef(ref string, refPath string, applicableOnSameInstance bool) (*Schema, error) {
	var stack []schemaRef
	if applicableOnSameInstance {
		// carry the current stack so cycles on the same instance are detected
		stack = ctx.stack
	}
	return ctx.c.compileRef(ctx.r, stack, refPath, ctx.res, ref)
}
|
||||
|
||||
// ValidationContext ---

// ValidationContext provides additional context required in validating for extension.
type ValidationContext struct {
	result          validationResult                                                  // tracks unevaluated properties/items
	validate        func(sch *Schema, schPath string, v interface{}, vpath string) error // validates a child value
	validateInplace func(sch *Schema, schPath string) error                           // validates the current value
	validationError func(keywordPath string, format string, a ...interface{}) *ValidationError
}

// EvaluatedProp marks given property of object as evaluated.
func (ctx ValidationContext) EvaluatedProp(prop string) {
	delete(ctx.result.unevalProps, prop)
}

// EvaluatedItem marks given index of array as evaluated.
func (ctx ValidationContext) EvaluatedItem(index int) {
	delete(ctx.result.unevalItems, index)
}

// Validate validates schema s with value v. Extension must use this method instead of
// *Schema.ValidateInterface method. This will be useful in implementing keywords like
// allOf/oneOf
//
// spath is relative-json-pointer to s
// vpath is relative-json-pointer to v.
func (ctx ValidationContext) Validate(s *Schema, spath string, v interface{}, vpath string) error {
	// an empty vpath means v is the value currently being validated
	if vpath == "" {
		return ctx.validateInplace(s, spath)
	}
	return ctx.validate(s, spath, v, vpath)
}

// Error used to construct validation error by extensions.
//
// keywordPath is relative-json-pointer to keyword.
func (ctx ValidationContext) Error(keywordPath string, format string, a ...interface{}) *ValidationError {
	return ctx.validationError(keywordPath, format, a...)
}

// Group is used by extensions to group multiple errors as causes to parent error.
// This is useful in implementing keywords like allOf where each schema specified
// in allOf can result a validationError.
func (ValidationError) Group(parent *ValidationError, causes ...error) error {
	return parent.add(causes...)
}
|
||||
567
vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
generated
vendored
Normal file
567
vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
generated
vendored
Normal file
|
|
@ -0,0 +1,567 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net"
|
||||
"net/mail"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Formats is a registry of functions, which know how to validate
// a specific format.
//
// New Formats can be registered by adding to this map. Key is format name,
// value is function that knows how to validate that format.
//
// Each validator returns true for non-string values, since "format"
// applies only to strings.
var Formats = map[string]func(interface{}) bool{
	"date-time":             isDateTime,
	"date":                  isDate,
	"time":                  isTime,
	"duration":              isDuration,
	"period":                isPeriod,
	"hostname":              isHostname,
	"email":                 isEmail,
	"ip-address":            isIPV4,
	"ipv4":                  isIPV4,
	"ipv6":                  isIPV6,
	"uri":                   isURI,
	"iri":                   isURI,
	"uri-reference":         isURIReference,
	"uriref":                isURIReference,
	"iri-reference":         isURIReference,
	"uri-template":          isURITemplate,
	"regex":                 isRegex,
	"json-pointer":          isJSONPointer,
	"relative-json-pointer": isRelativeJSONPointer,
	"uuid":                  isUUID,
}
|
||||
|
||||
// isDateTime tells whether given string is a valid date representation
|
||||
// as defined by RFC 3339, section 5.6.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
|
||||
func isDateTime(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
if len(s) < 20 { // yyyy-mm-ddThh:mm:ssZ
|
||||
return false
|
||||
}
|
||||
if s[10] != 'T' && s[10] != 't' {
|
||||
return false
|
||||
}
|
||||
return isDate(s[:10]) && isTime(s[11:])
|
||||
}
|
||||
|
||||
// isDate tells whether given string is a valid full-date production
// as defined by RFC 3339, section 5.6 (yyyy-mm-dd).
//
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
func isDate(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	if _, err := time.Parse("2006-01-02", s); err != nil {
		return false
	}
	return true
}
|
||||
|
||||
// isTime tells whether given string is a valid full-time production
// as defined by RFC 3339, section 5.6.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
func isTime(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}

	// golang time package does not support leap seconds.
	// so we are parsing it manually here.

	// hh:mm:ss
	// 01234567
	if len(str) < 9 || str[2] != ':' || str[5] != ':' {
		return false
	}
	// isInRange parses str as an integer and checks min <= n <= max.
	// NOTE(review): strconv.Atoi also accepts a leading sign, so input
	// like "+1:00:00Z" passes — confirm whether stricter digit-only
	// parsing is wanted here.
	isInRange := func(str string, min, max int) (int, bool) {
		n, err := strconv.Atoi(str)
		if err != nil {
			return 0, false
		}
		if n < min || n > max {
			return 0, false
		}
		return n, true
	}
	var h, m, s int
	if h, ok = isInRange(str[0:2], 0, 23); !ok {
		return false
	}
	if m, ok = isInRange(str[3:5], 0, 59); !ok {
		return false
	}
	// 60 is allowed here to admit a leap second; validated further below
	if s, ok = isInRange(str[6:8], 0, 60); !ok {
		return false
	}
	str = str[8:]

	// parse secfrac if present
	if str[0] == '.' {
		// dot following more than one digit
		str = str[1:]
		var numDigits int
		for str != "" {
			if str[0] < '0' || str[0] > '9' {
				break
			}
			numDigits++
			str = str[1:]
		}
		if numDigits == 0 {
			return false
		}
	}

	// a timezone designator (Z or numeric offset) is mandatory
	if len(str) == 0 {
		return false
	}

	if str[0] == 'z' || str[0] == 'Z' {
		if len(str) != 1 {
			return false
		}
	} else {
		// time-numoffset
		// +hh:mm
		// 012345
		if len(str) != 6 || str[3] != ':' {
			return false
		}

		// sign is inverted because the offset is applied to convert
		// the local time back to UTC for the leap-second check
		var sign int
		if str[0] == '+' {
			sign = -1
		} else if str[0] == '-' {
			sign = +1
		} else {
			return false
		}

		var zh, zm int
		if zh, ok = isInRange(str[1:3], 0, 23); !ok {
			return false
		}
		if zm, ok = isInRange(str[4:6], 0, 59); !ok {
			return false
		}

		// apply timezone offset
		hm := (h*60 + m) + sign*(zh*60+zm)
		if hm < 0 {
			hm += 24 * 60
		}
		h, m = hm/60, hm%60
	}

	// check leapsecond
	if s == 60 { // leap second
		// leap seconds only ever occur at 23:59:60 UTC
		if h != 23 || m != 59 {
			return false
		}
	}

	return true
}
|
||||
|
||||
// isDuration tells whether given string is a valid duration format
// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
func isDuration(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	if len(s) == 0 || s[0] != 'P' {
		return false
	}
	s = s[1:]
	// parseUnits consumes digit+designator pairs from s (mutated via the
	// closure) until a 'T' or end of input, returning the designator
	// letters seen, in order.
	parseUnits := func() (units string, ok bool) {
		for len(s) > 0 && s[0] != 'T' {
			digits := false
			for {
				if len(s) == 0 {
					break
				}
				if s[0] < '0' || s[0] > '9' {
					break
				}
				digits = true
				s = s[1:]
			}
			if !digits || len(s) == 0 {
				return units, false
			}
			units += s[:1]
			s = s[1:]
		}
		return units, true
	}
	units, ok := parseUnits()
	if !ok {
		return false
	}
	if units == "W" {
		return len(s) == 0 // P_W
	}
	if len(units) > 0 {
		// date designators must be a contiguous run of "YMD".
		// NOTE(review): this substring check also rejects sets that
		// skip a unit (e.g. "P1Y2D") — confirm that is intended.
		if strings.Index("YMD", units) == -1 {
			return false
		}
		if len(s) == 0 {
			return true // "P" dur-date
		}
	}
	// anything remaining must be a time part introduced by 'T'
	if len(s) == 0 || s[0] != 'T' {
		return false
	}
	s = s[1:]
	units, ok = parseUnits()
	return ok && len(s) == 0 && len(units) > 0 && strings.Index("HMS", units) != -1
}
|
||||
|
||||
// isPeriod tells whether given string is a valid period format
|
||||
// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
|
||||
func isPeriod(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
slash := strings.IndexByte(s, '/')
|
||||
if slash == -1 {
|
||||
return false
|
||||
}
|
||||
start, end := s[:slash], s[slash+1:]
|
||||
if isDateTime(start) {
|
||||
return isDateTime(end) || isDuration(end)
|
||||
}
|
||||
return isDuration(start) && isDateTime(end)
|
||||
}
|
||||
|
||||
// isHostname tells whether given string is a valid representation
// for an Internet host name, as defined by RFC 1034 section 3.1 and
// RFC 1123 section 2.1.
//
// See https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names, for details.
func isHostname(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	// entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
	s = strings.TrimSuffix(s, ".")
	if len(s) > 253 {
		return false
	}

	// Hostnames are composed of series of labels concatenated with dots, as are all domain names
	for _, label := range strings.Split(s, ".") {
		// Each label must be from 1 to 63 characters long
		if labelLen := len(label); labelLen < 1 || labelLen > 63 {
			return false
		}

		// labels must not start with a hyphen
		// RFC 1123 section 2.1: restriction on the first character
		// is relaxed to allow either a letter or a digit
		//
		// BUGFIX: previously this inspected s[0] (the first character of
		// the whole hostname) instead of the current label, so names
		// like "a.-b.com" were wrongly accepted.
		if label[0] == '-' {
			return false
		}

		// must not end with a hyphen
		if label[len(label)-1] == '-' {
			return false
		}

		// labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
		// the digits '0' through '9', and the hyphen ('-')
		for _, c := range label {
			if valid := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-'); !valid {
				return false
			}
		}
	}

	return true
}
|
||||
|
||||
// isEmail tells whether given string is a valid Internet email address
|
||||
// as defined by RFC 5322, section 3.4.1.
|
||||
//
|
||||
// See https://en.wikipedia.org/wiki/Email_address, for details.
|
||||
func isEmail(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
// entire email address to be no more than 254 characters long
|
||||
if len(s) > 254 {
|
||||
return false
|
||||
}
|
||||
|
||||
// email address is generally recognized as having two parts joined with an at-sign
|
||||
at := strings.LastIndexByte(s, '@')
|
||||
if at == -1 {
|
||||
return false
|
||||
}
|
||||
local := s[0:at]
|
||||
domain := s[at+1:]
|
||||
|
||||
// local part may be up to 64 characters long
|
||||
if len(local) > 64 {
|
||||
return false
|
||||
}
|
||||
|
||||
// domain if enclosed in brackets, must match an IP address
|
||||
if len(domain) >= 2 && domain[0] == '[' && domain[len(domain)-1] == ']' {
|
||||
ip := domain[1 : len(domain)-1]
|
||||
if strings.HasPrefix(ip, "IPv6:") {
|
||||
return isIPV6(strings.TrimPrefix(ip, "IPv6:"))
|
||||
}
|
||||
return isIPV4(ip)
|
||||
}
|
||||
|
||||
// domain must match the requirements for a hostname
|
||||
if !isHostname(domain) {
|
||||
return false
|
||||
}
|
||||
|
||||
_, err := mail.ParseAddress(s)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// isIPV4 tells whether given string is a valid representation of an IPv4 address
// according to the "dotted-quad" ABNF syntax as defined in RFC 2673, section 3.2.
func isIPV4(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	groups := strings.Split(s, ".")
	if len(groups) != 4 {
		return false
	}
	for _, group := range groups {
		// each octet is 1-3 decimal digits; checking characters first
		// rejects signed input like "+1" or "-0" that strconv.Atoi
		// would otherwise accept (BUGFIX).
		if len(group) == 0 || len(group) > 3 {
			return false
		}
		for i := 0; i < len(group); i++ {
			if group[i] < '0' || group[i] > '9' {
				return false
			}
		}
		// leading zeroes should be rejected, as they are treated as octals;
		// this now also covers all-zero groups like "00" (BUGFIX).
		if len(group) > 1 && group[0] == '0' {
			return false
		}
		// Atoi cannot fail on 1-3 digits, and such a value cannot be negative
		n, _ := strconv.Atoi(group)
		if n > 255 {
			return false
		}
	}
	return true
}
|
||||
|
||||
// isIPV6 tells whether given string is a valid representation of an IPv6
// address as defined in RFC 2373, section 2.2.
func isIPV6(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	// require at least one colon so plain IPv4 text is rejected,
	// then delegate the real parsing to the standard library.
	return strings.Contains(s, ":") && net.ParseIP(s) != nil
}

// isURI tells whether given string is valid URI, according to RFC 3986.
func isURI(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	if u, err := urlParse(s); err == nil {
		return u.IsAbs()
	}
	return false
}

// urlParse parses s as a URL and additionally validates a bracketed
// IPv6 host, which net/url alone does not reject.
func urlParse(s string) (*url.URL, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, err
	}

	host := u.Hostname()
	if strings.IndexByte(host, ':') == -1 {
		// not an ipv6 literal; nothing further to verify
		return u, nil
	}
	if !strings.Contains(u.Host, "[") || !strings.Contains(u.Host, "]") {
		return nil, errors.New("ipv6 address is not enclosed in brackets")
	}
	if !isIPV6(host) {
		return nil, errors.New("invalid ipv6 address")
	}
	return u, nil
}
|
||||
|
||||
// isURIReference tells whether given string is a valid URI Reference
|
||||
// (either a URI or a relative-reference), according to RFC 3986.
|
||||
func isURIReference(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
_, err := urlParse(s)
|
||||
return err == nil && !strings.Contains(s, `\`)
|
||||
}
|
||||
|
||||
// isURITemplate tells whether given string is a valid URI Template
// according to RFC6570.
//
// Current implementation does minimal validation.
func isURITemplate(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	u, err := urlParse(s)
	if err != nil {
		return false
	}
	// NOTE(review): u.RawPath is only populated when the path required
	// escaping; for most inputs it is "" and this loop inspects a single
	// empty segment — confirm whether u.EscapedPath()/u.Path was intended.
	for _, item := range strings.Split(u.RawPath, "/") {
		// each path segment may contain only non-nested {...} expressions;
		// track brace balance per segment
		depth := 0
		for _, ch := range item {
			switch ch {
			case '{':
				depth++
				if depth != 1 {
					return false // nested '{'
				}
			case '}':
				depth--
				if depth != 0 {
					return false // unmatched '}'
				}
			}
		}
		if depth != 0 {
			return false // unterminated expression
		}
	}
	return true
}
|
||||
|
||||
// isRegex tells whether given string is a valid regular expression,
// according to the ECMA 262 regular expression dialect.
//
// The implementation uses go-lang regexp package.
func isRegex(v interface{}) bool {
	if s, ok := v.(string); ok {
		_, err := regexp.Compile(s)
		return err == nil
	}
	return true // format checks apply to strings only
}
|
||||
|
||||
// isJSONPointer tells whether given string is a valid JSON Pointer.
//
// Note: It returns false for JSON Pointer URI fragments.
func isJSONPointer(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	// a non-empty pointer must start with '/'
	if s != "" && s[0] != '/' {
		return false
	}
	for _, token := range strings.Split(s, "/") {
		for i := 0; i < len(token); i++ {
			if token[i] != '~' {
				continue
			}
			// '~' must begin a complete escape: "~0" or "~1"
			if i+1 >= len(token) || (token[i+1] != '0' && token[i+1] != '1') {
				return false
			}
		}
	}
	return true
}
|
||||
|
||||
// isRelativeJSONPointer tells whether given string is a valid Relative JSON Pointer.
|
||||
//
|
||||
// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
|
||||
func isRelativeJSONPointer(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
if s == "" {
|
||||
return false
|
||||
}
|
||||
if s[0] == '0' {
|
||||
s = s[1:]
|
||||
} else if s[0] >= '0' && s[0] <= '9' {
|
||||
for s != "" && s[0] >= '0' && s[0] <= '9' {
|
||||
s = s[1:]
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
return s == "#" || isJSONPointer(s)
|
||||
}
|
||||
|
||||
// isUUID tells whether given string is a valid uuid format
// as specified in RFC4122 (8-4-4-4-12 hexadecimal digits).
//
// see https://datatracker.ietf.org/doc/html/rfc4122#page-4, for details
func isUUID(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format checks apply to strings only
	}
	hexDigit := func(c byte) bool {
		return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')
	}
	sizes := [5]int{8, 4, 4, 4, 12}
	pos := 0
	for i, size := range sizes {
		// groups after the first are preceded by a dash
		if i > 0 {
			if pos >= len(s) || s[pos] != '-' {
				return false
			}
			pos++
		}
		for j := 0; j < size; j++ {
			if pos >= len(s) || !hexDigit(s[pos]) {
				return false
			}
			pos++
		}
	}
	// no trailing characters allowed
	return pos == len(s)
}
|
||||
60
vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
generated
vendored
Normal file
60
vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
generated
vendored
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// loadFileURL opens the local file referenced by a file:// url and
// returns it for reading.
func loadFileURL(s string) (io.ReadCloser, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, err
	}
	f := u.Path
	if runtime.GOOS == "windows" {
		// "file:///c:/dir/x" parses with a leading slash before the
		// drive letter; strip it and convert slashes for the OS.
		f = strings.TrimPrefix(f, "/")
		f = filepath.FromSlash(f)
	}
	return os.Open(f)
}
|
||||
|
||||
// Loaders is a registry of functions, which know how to load
// absolute url of specific schema.
//
// New loaders can be registered by adding to this map. Key is the url
// scheme, value is function that knows how to load urls of that scheme.
//
// Only "file" is registered by default; import the httploader
// subpackage to add http/https support.
var Loaders = map[string]func(url string) (io.ReadCloser, error){
	"file": loadFileURL,
}
|
||||
|
||||
// LoaderNotFoundError is the error type returned by Load function.
// It tells that no Loader is registered for that URL Scheme.
type LoaderNotFoundError string

// Error implements the error interface.
func (e LoaderNotFoundError) Error() string {
	return fmt.Sprint("jsonschema: no Loader found for ", string(e))
}
|
||||
|
||||
// LoadURL loads document at given absolute URL. The default implementation
|
||||
// uses Loaders registry to lookup by schema and uses that loader.
|
||||
//
|
||||
// Users can change this variable, if they would like to take complete
|
||||
// responsibility of loading given URL. Used by Compiler if its LoadURL
|
||||
// field is nil.
|
||||
var LoadURL = func(s string) (io.ReadCloser, error) {
|
||||
u, err := url.Parse(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
loader, ok := Loaders[u.Scheme]
|
||||
if !ok {
|
||||
return nil, LoaderNotFoundError(s)
|
||||
|
||||
}
|
||||
return loader(s)
|
||||
}
|
||||
77
vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
generated
vendored
Normal file
77
vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
generated
vendored
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
package jsonschema
|
||||
|
||||
// Flag is output format with simple boolean property valid.
type Flag struct {
	Valid bool `json:"valid"`
}

// FlagOutput returns output in flag format.
// It is invoked on a ValidationError, so the zero value (Valid=false)
// is always the correct result.
func (ve *ValidationError) FlagOutput() Flag {
	return Flag{}
}
|
||||
|
||||
// Basic ---

// Basic is output format with flat list of output units.
type Basic struct {
	Valid  bool         `json:"valid"`
	Errors []BasicError `json:"errors"`
}

// BasicError is output unit in basic format.
type BasicError struct {
	KeywordLocation         string `json:"keywordLocation"`         // validation path of the failing keyword
	AbsoluteKeywordLocation string `json:"absoluteKeywordLocation"` // absolute schema location of the failing keyword
	InstanceLocation        string `json:"instanceLocation"`        // location of the value within the instance
	Error                   string `json:"error"`                   // human readable message
}
|
||||
|
||||
// BasicOutput returns output in basic format
|
||||
func (ve *ValidationError) BasicOutput() Basic {
|
||||
var errors []BasicError
|
||||
var flatten func(*ValidationError)
|
||||
flatten = func(ve *ValidationError) {
|
||||
errors = append(errors, BasicError{
|
||||
KeywordLocation: ve.KeywordLocation,
|
||||
AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
|
||||
InstanceLocation: ve.InstanceLocation,
|
||||
Error: ve.Message,
|
||||
})
|
||||
for _, cause := range ve.Causes {
|
||||
flatten(cause)
|
||||
}
|
||||
}
|
||||
flatten(ve)
|
||||
return Basic{Errors: errors}
|
||||
}
|
||||
|
||||
// Detailed ---

// Detailed is output format based on structure of schema.
type Detailed struct {
	Valid                   bool       `json:"valid"`
	KeywordLocation         string     `json:"keywordLocation"`         // validation path of the failing keyword
	AbsoluteKeywordLocation string     `json:"absoluteKeywordLocation"` // absolute schema location of the failing keyword
	InstanceLocation        string     `json:"instanceLocation"`        // location of the value within the instance
	Error                   string     `json:"error,omitempty"`         // message; empty for non-leaf nodes
	Errors                  []Detailed `json:"errors,omitempty"`        // nested output units
}
|
||||
|
||||
// DetailedOutput returns output in detailed format
|
||||
func (ve *ValidationError) DetailedOutput() Detailed {
|
||||
var errors []Detailed
|
||||
for _, cause := range ve.Causes {
|
||||
errors = append(errors, cause.DetailedOutput())
|
||||
}
|
||||
var message = ve.Message
|
||||
if len(ve.Causes) > 0 {
|
||||
message = ""
|
||||
}
|
||||
return Detailed{
|
||||
KeywordLocation: ve.KeywordLocation,
|
||||
AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
|
||||
InstanceLocation: ve.InstanceLocation,
|
||||
Error: message,
|
||||
Errors: errors,
|
||||
}
|
||||
}
|
||||
280
vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
generated
vendored
Normal file
280
vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
generated
vendored
Normal file
|
|
@ -0,0 +1,280 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// resource represents one json document (or a subschema within one)
// loaded during compilation.
type resource struct {
	url          string // base url of resource. can be empty
	floc         string // fragment with json-pointer from root resource
	doc          interface{}
	draft        *Draft
	subresources map[string]*resource // key is floc. only applicable for root resource
	schema       *Schema
}

// String returns the canonical identifier of the resource: url#fragment.
func (r *resource) String() string {
	return r.url + r.floc
}
|
||||
|
||||
// newResource parses the document read from r into a root resource
// anchored at the given url. The url must not contain a fragment.
func newResource(url string, r io.Reader) (*resource, error) {
	if strings.IndexByte(url, '#') != -1 {
		// callers are required to strip fragments before constructing a resource
		panic(fmt.Sprintf("BUG: newResource(%q)", url))
	}
	doc, err := unmarshal(r)
	if err != nil {
		return nil, fmt.Errorf("jsonschema: invalid json %s: %v", url, err)
	}
	url, err = toAbs(url)
	if err != nil {
		return nil, err
	}
	return &resource{
		url:  url,
		floc: "#",
		doc:  doc,
	}, nil
}
|
||||
|
||||
// fillSubschemas fills subschemas in res into r.subresources
|
||||
func (r *resource) fillSubschemas(c *Compiler, res *resource) error {
|
||||
if err := c.validateSchema(r, res.doc, res.floc[1:]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if r.subresources == nil {
|
||||
r.subresources = make(map[string]*resource)
|
||||
}
|
||||
if err := r.draft.listSubschemas(res, r.baseURL(res.floc), r.subresources); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// ensure subresource.url uniqueness
|
||||
url2floc := make(map[string]string)
|
||||
for _, sr := range r.subresources {
|
||||
if sr.url != "" {
|
||||
if floc, ok := url2floc[sr.url]; ok {
|
||||
return fmt.Errorf("jsonschema: %q and %q in %s have same canonical-uri", floc[1:], sr.floc[1:], r.url)
|
||||
}
|
||||
url2floc[sr.url] = sr.floc
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// listResources lists all subresources in res
|
||||
func (r *resource) listResources(res *resource) []*resource {
|
||||
var result []*resource
|
||||
prefix := res.floc + "/"
|
||||
for _, sr := range r.subresources {
|
||||
if strings.HasPrefix(sr.floc, prefix) {
|
||||
result = append(result, sr)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (r *resource) findResource(url string) *resource {
|
||||
if r.url == url {
|
||||
return r
|
||||
}
|
||||
for _, res := range r.subresources {
|
||||
if res.url == url {
|
||||
return res
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// resolve fragment f with sr as base
|
||||
func (r *resource) resolveFragment(c *Compiler, sr *resource, f string) (*resource, error) {
|
||||
if f == "#" || f == "#/" {
|
||||
return sr, nil
|
||||
}
|
||||
|
||||
// resolve by anchor
|
||||
if !strings.HasPrefix(f, "#/") {
|
||||
// check in given resource
|
||||
for _, anchor := range r.draft.anchors(sr.doc) {
|
||||
if anchor == f[1:] {
|
||||
return sr, nil
|
||||
}
|
||||
}
|
||||
|
||||
// check in subresources that has same base url
|
||||
prefix := sr.floc + "/"
|
||||
for _, res := range r.subresources {
|
||||
if strings.HasPrefix(res.floc, prefix) && r.baseURL(res.floc) == sr.url {
|
||||
for _, anchor := range r.draft.anchors(res.doc) {
|
||||
if anchor == f[1:] {
|
||||
return res, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// resolve by ptr
|
||||
floc := sr.floc + f[1:]
|
||||
if res, ok := r.subresources[floc]; ok {
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// non-standrad location
|
||||
doc := r.doc
|
||||
for _, item := range strings.Split(floc[2:], "/") {
|
||||
item = strings.Replace(item, "~1", "/", -1)
|
||||
item = strings.Replace(item, "~0", "~", -1)
|
||||
item, err := url.PathUnescape(item)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch d := doc.(type) {
|
||||
case map[string]interface{}:
|
||||
if _, ok := d[item]; !ok {
|
||||
return nil, nil
|
||||
}
|
||||
doc = d[item]
|
||||
case []interface{}:
|
||||
index, err := strconv.Atoi(item)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if index < 0 || index >= len(d) {
|
||||
return nil, nil
|
||||
}
|
||||
doc = d[index]
|
||||
default:
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
|
||||
id, err := r.draft.resolveID(r.baseURL(floc), doc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
res := &resource{url: id, floc: floc, doc: doc}
|
||||
r.subresources[floc] = res
|
||||
if err := r.fillSubschemas(c, res); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (r *resource) baseURL(floc string) string {
|
||||
for {
|
||||
if sr, ok := r.subresources[floc]; ok {
|
||||
if sr.url != "" {
|
||||
return sr.url
|
||||
}
|
||||
}
|
||||
slash := strings.LastIndexByte(floc, '/')
|
||||
if slash == -1 {
|
||||
break
|
||||
}
|
||||
floc = floc[:slash]
|
||||
}
|
||||
return r.url
|
||||
}
|
||||
|
||||
// url helpers ---
|
||||
|
||||
func toAbs(s string) (string, error) {
|
||||
// if windows absolute file path, convert to file url
|
||||
// because: net/url parses driver name as scheme
|
||||
if runtime.GOOS == "windows" && len(s) >= 3 && s[1:3] == `:\` {
|
||||
s = "file:///" + filepath.ToSlash(s)
|
||||
}
|
||||
|
||||
u, err := url.Parse(s)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if u.IsAbs() {
|
||||
return s, nil
|
||||
}
|
||||
|
||||
// s is filepath
|
||||
if s, err = filepath.Abs(s); err != nil {
|
||||
return "", err
|
||||
}
|
||||
if runtime.GOOS == "windows" {
|
||||
s = "file:///" + filepath.ToSlash(s)
|
||||
} else {
|
||||
s = "file://" + s
|
||||
}
|
||||
u, err = url.Parse(s) // to fix spaces in filepath
|
||||
return u.String(), err
|
||||
}
|
||||
|
||||
func resolveURL(base, ref string) (string, error) {
|
||||
if ref == "" {
|
||||
return base, nil
|
||||
}
|
||||
if strings.HasPrefix(ref, "urn:") {
|
||||
return ref, nil
|
||||
}
|
||||
|
||||
refURL, err := url.Parse(ref)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if refURL.IsAbs() {
|
||||
return ref, nil
|
||||
}
|
||||
|
||||
if strings.HasPrefix(base, "urn:") {
|
||||
base, _ = split(base)
|
||||
return base + ref, nil
|
||||
}
|
||||
|
||||
baseURL, err := url.Parse(base)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return baseURL.ResolveReference(refURL).String(), nil
|
||||
}
|
||||
|
||||
func split(uri string) (string, string) {
|
||||
hash := strings.IndexByte(uri, '#')
|
||||
if hash == -1 {
|
||||
return uri, "#"
|
||||
}
|
||||
f := uri[hash:]
|
||||
if f == "#/" {
|
||||
f = "#"
|
||||
}
|
||||
return uri[0:hash], f
|
||||
}
|
||||
|
||||
func (s *Schema) url() string {
|
||||
u, _ := split(s.Location)
|
||||
return u
|
||||
}
|
||||
|
||||
func (s *Schema) loc() string {
|
||||
_, f := split(s.Location)
|
||||
return f[1:]
|
||||
}
|
||||
|
||||
func unmarshal(r io.Reader) (interface{}, error) {
|
||||
decoder := json.NewDecoder(r)
|
||||
decoder.UseNumber()
|
||||
var doc interface{}
|
||||
if err := decoder.Decode(&doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if t, _ := decoder.Token(); t != nil {
|
||||
return nil, fmt.Errorf("invalid character %v after top-level value", t)
|
||||
}
|
||||
return doc, nil
|
||||
}
|
||||
826
vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
generated
vendored
Normal file
826
vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
generated
vendored
Normal file
|
|
@ -0,0 +1,826 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// A Schema represents compiled version of json-schema.
|
||||
type Schema struct {
|
||||
Location string // absolute location
|
||||
|
||||
meta *Schema
|
||||
vocab []string
|
||||
dynamicAnchors []*Schema
|
||||
|
||||
// type agnostic validations
|
||||
Format string
|
||||
format func(interface{}) bool
|
||||
Always *bool // always pass/fail. used when booleans are used as schemas in draft-07.
|
||||
Ref *Schema
|
||||
RecursiveAnchor bool
|
||||
RecursiveRef *Schema
|
||||
DynamicAnchor string
|
||||
DynamicRef *Schema
|
||||
Types []string // allowed types.
|
||||
Constant []interface{} // first element in slice is constant value. note: slice is used to capture nil constant.
|
||||
Enum []interface{} // allowed values.
|
||||
enumError string // error message for enum fail. captured here to avoid constructing error message every time.
|
||||
Not *Schema
|
||||
AllOf []*Schema
|
||||
AnyOf []*Schema
|
||||
OneOf []*Schema
|
||||
If *Schema
|
||||
Then *Schema // nil, when If is nil.
|
||||
Else *Schema // nil, when If is nil.
|
||||
|
||||
// object validations
|
||||
MinProperties int // -1 if not specified.
|
||||
MaxProperties int // -1 if not specified.
|
||||
Required []string // list of required properties.
|
||||
Properties map[string]*Schema
|
||||
PropertyNames *Schema
|
||||
RegexProperties bool // property names must be valid regex. used only in draft4 as workaround in metaschema.
|
||||
PatternProperties map[*regexp.Regexp]*Schema
|
||||
AdditionalProperties interface{} // nil or bool or *Schema.
|
||||
Dependencies map[string]interface{} // map value is *Schema or []string.
|
||||
DependentRequired map[string][]string
|
||||
DependentSchemas map[string]*Schema
|
||||
UnevaluatedProperties *Schema
|
||||
|
||||
// array validations
|
||||
MinItems int // -1 if not specified.
|
||||
MaxItems int // -1 if not specified.
|
||||
UniqueItems bool
|
||||
Items interface{} // nil or *Schema or []*Schema
|
||||
AdditionalItems interface{} // nil or bool or *Schema.
|
||||
PrefixItems []*Schema
|
||||
Items2020 *Schema // items keyword reintroduced in draft 2020-12
|
||||
Contains *Schema
|
||||
ContainsEval bool // whether any item in an array that passes validation of the contains schema is considered "evaluated"
|
||||
MinContains int // 1 if not specified
|
||||
MaxContains int // -1 if not specified
|
||||
UnevaluatedItems *Schema
|
||||
|
||||
// string validations
|
||||
MinLength int // -1 if not specified.
|
||||
MaxLength int // -1 if not specified.
|
||||
Pattern *regexp.Regexp
|
||||
ContentEncoding string
|
||||
decoder func(string) ([]byte, error)
|
||||
ContentMediaType string
|
||||
mediaType func([]byte) error
|
||||
ContentSchema *Schema
|
||||
|
||||
// number validators
|
||||
Minimum *big.Rat
|
||||
ExclusiveMinimum *big.Rat
|
||||
Maximum *big.Rat
|
||||
ExclusiveMaximum *big.Rat
|
||||
MultipleOf *big.Rat
|
||||
|
||||
// annotations. captured only when Compiler.ExtractAnnotations is true.
|
||||
Title string
|
||||
Description string
|
||||
Default interface{}
|
||||
Comment string
|
||||
ReadOnly bool
|
||||
WriteOnly bool
|
||||
Examples []interface{}
|
||||
Deprecated bool
|
||||
|
||||
// user defined extensions
|
||||
Extensions map[string]ExtSchema
|
||||
}
|
||||
|
||||
func (s *Schema) String() string {
|
||||
return s.Location
|
||||
}
|
||||
|
||||
func newSchema(url, floc string, doc interface{}) *Schema {
|
||||
// fill with default values
|
||||
s := &Schema{
|
||||
Location: url + floc,
|
||||
MinProperties: -1,
|
||||
MaxProperties: -1,
|
||||
MinItems: -1,
|
||||
MaxItems: -1,
|
||||
MinContains: 1,
|
||||
MaxContains: -1,
|
||||
MinLength: -1,
|
||||
MaxLength: -1,
|
||||
}
|
||||
|
||||
if doc, ok := doc.(map[string]interface{}); ok {
|
||||
if ra, ok := doc["$recursiveAnchor"]; ok {
|
||||
if ra, ok := ra.(bool); ok {
|
||||
s.RecursiveAnchor = ra
|
||||
}
|
||||
}
|
||||
if da, ok := doc["$dynamicAnchor"]; ok {
|
||||
if da, ok := da.(string); ok {
|
||||
s.DynamicAnchor = da
|
||||
}
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *Schema) hasVocab(name string) bool {
|
||||
if s == nil { // during bootstrap
|
||||
return true
|
||||
}
|
||||
if name == "core" {
|
||||
return true
|
||||
}
|
||||
for _, url := range s.vocab {
|
||||
if url == "https://json-schema.org/draft/2019-09/vocab/"+name {
|
||||
return true
|
||||
}
|
||||
if url == "https://json-schema.org/draft/2020-12/vocab/"+name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Validate validates given doc, against the json-schema s.
|
||||
//
|
||||
// the v must be the raw json value. for number precision
|
||||
// unmarshal with json.UseNumber().
|
||||
//
|
||||
// returns *ValidationError if v does not confirm with schema s.
|
||||
// returns InfiniteLoopError if it detects loop during validation.
|
||||
// returns InvalidJSONTypeError if it detects any non json value in v.
|
||||
func (s *Schema) Validate(v interface{}) (err error) {
|
||||
return s.validateValue(v, "")
|
||||
}
|
||||
|
||||
func (s *Schema) validateValue(v interface{}, vloc string) (err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
switch r := r.(type) {
|
||||
case InfiniteLoopError, InvalidJSONTypeError:
|
||||
err = r.(error)
|
||||
default:
|
||||
panic(r)
|
||||
}
|
||||
}
|
||||
}()
|
||||
if _, err := s.validate(nil, 0, "", v, vloc); err != nil {
|
||||
ve := ValidationError{
|
||||
KeywordLocation: "",
|
||||
AbsoluteKeywordLocation: s.Location,
|
||||
InstanceLocation: vloc,
|
||||
Message: fmt.Sprintf("doesn't validate with %s", s.Location),
|
||||
}
|
||||
return ve.causes(err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// validate validates given value v with this schema.
|
||||
func (s *Schema) validate(scope []schemaRef, vscope int, spath string, v interface{}, vloc string) (result validationResult, err error) {
|
||||
validationError := func(keywordPath string, format string, a ...interface{}) *ValidationError {
|
||||
return &ValidationError{
|
||||
KeywordLocation: keywordLocation(scope, keywordPath),
|
||||
AbsoluteKeywordLocation: joinPtr(s.Location, keywordPath),
|
||||
InstanceLocation: vloc,
|
||||
Message: fmt.Sprintf(format, a...),
|
||||
}
|
||||
}
|
||||
|
||||
sref := schemaRef{spath, s, false}
|
||||
if err := checkLoop(scope[len(scope)-vscope:], sref); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
scope = append(scope, sref)
|
||||
vscope++
|
||||
|
||||
// populate result
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
result.unevalProps = make(map[string]struct{})
|
||||
for pname := range v {
|
||||
result.unevalProps[pname] = struct{}{}
|
||||
}
|
||||
case []interface{}:
|
||||
result.unevalItems = make(map[int]struct{})
|
||||
for i := range v {
|
||||
result.unevalItems[i] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
validate := func(sch *Schema, schPath string, v interface{}, vpath string) error {
|
||||
vloc := vloc
|
||||
if vpath != "" {
|
||||
vloc += "/" + vpath
|
||||
}
|
||||
_, err := sch.validate(scope, 0, schPath, v, vloc)
|
||||
return err
|
||||
}
|
||||
|
||||
validateInplace := func(sch *Schema, schPath string) error {
|
||||
vr, err := sch.validate(scope, vscope, schPath, v, vloc)
|
||||
if err == nil {
|
||||
// update result
|
||||
for pname := range result.unevalProps {
|
||||
if _, ok := vr.unevalProps[pname]; !ok {
|
||||
delete(result.unevalProps, pname)
|
||||
}
|
||||
}
|
||||
for i := range result.unevalItems {
|
||||
if _, ok := vr.unevalItems[i]; !ok {
|
||||
delete(result.unevalItems, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
if s.Always != nil {
|
||||
if !*s.Always {
|
||||
return result, validationError("", "not allowed")
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
if len(s.Types) > 0 {
|
||||
vType := jsonType(v)
|
||||
matched := false
|
||||
for _, t := range s.Types {
|
||||
if vType == t {
|
||||
matched = true
|
||||
break
|
||||
} else if t == "integer" && vType == "number" {
|
||||
num, _ := new(big.Rat).SetString(fmt.Sprint(v))
|
||||
if num.IsInt() {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return result, validationError("type", "expected %s, but got %s", strings.Join(s.Types, " or "), vType)
|
||||
}
|
||||
}
|
||||
|
||||
var errors []error
|
||||
|
||||
if len(s.Constant) > 0 {
|
||||
if !equals(v, s.Constant[0]) {
|
||||
switch jsonType(s.Constant[0]) {
|
||||
case "object", "array":
|
||||
errors = append(errors, validationError("const", "const failed"))
|
||||
default:
|
||||
errors = append(errors, validationError("const", "value must be %#v", s.Constant[0]))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.Enum) > 0 {
|
||||
matched := false
|
||||
for _, item := range s.Enum {
|
||||
if equals(v, item) {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
errors = append(errors, validationError("enum", s.enumError))
|
||||
}
|
||||
}
|
||||
|
||||
if s.format != nil && !s.format(v) {
|
||||
var val = v
|
||||
if v, ok := v.(string); ok {
|
||||
val = quote(v)
|
||||
}
|
||||
errors = append(errors, validationError("format", "%v is not valid %s", val, quote(s.Format)))
|
||||
}
|
||||
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
if s.MinProperties != -1 && len(v) < s.MinProperties {
|
||||
errors = append(errors, validationError("minProperties", "minimum %d properties allowed, but found %d properties", s.MinProperties, len(v)))
|
||||
}
|
||||
if s.MaxProperties != -1 && len(v) > s.MaxProperties {
|
||||
errors = append(errors, validationError("maxProperties", "maximum %d properties allowed, but found %d properties", s.MaxProperties, len(v)))
|
||||
}
|
||||
if len(s.Required) > 0 {
|
||||
var missing []string
|
||||
for _, pname := range s.Required {
|
||||
if _, ok := v[pname]; !ok {
|
||||
missing = append(missing, quote(pname))
|
||||
}
|
||||
}
|
||||
if len(missing) > 0 {
|
||||
errors = append(errors, validationError("required", "missing properties: %s", strings.Join(missing, ", ")))
|
||||
}
|
||||
}
|
||||
|
||||
for pname, sch := range s.Properties {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
delete(result.unevalProps, pname)
|
||||
if err := validate(sch, "properties/"+escape(pname), pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.PropertyNames != nil {
|
||||
for pname := range v {
|
||||
if err := validate(s.PropertyNames, "propertyNames", pname, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.RegexProperties {
|
||||
for pname := range v {
|
||||
if !isRegex(pname) {
|
||||
errors = append(errors, validationError("", "patternProperty %s is not valid regex", quote(pname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
for pattern, sch := range s.PatternProperties {
|
||||
for pname, pvalue := range v {
|
||||
if pattern.MatchString(pname) {
|
||||
delete(result.unevalProps, pname)
|
||||
if err := validate(sch, "patternProperties/"+escape(pattern.String()), pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if s.AdditionalProperties != nil {
|
||||
if allowed, ok := s.AdditionalProperties.(bool); ok {
|
||||
if !allowed && len(result.unevalProps) > 0 {
|
||||
errors = append(errors, validationError("additionalProperties", "additionalProperties %s not allowed", result.unevalPnames()))
|
||||
}
|
||||
} else {
|
||||
schema := s.AdditionalProperties.(*Schema)
|
||||
for pname := range result.unevalProps {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
if err := validate(schema, "additionalProperties", pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
result.unevalProps = nil
|
||||
}
|
||||
for dname, dvalue := range s.Dependencies {
|
||||
if _, ok := v[dname]; ok {
|
||||
switch dvalue := dvalue.(type) {
|
||||
case *Schema:
|
||||
if err := validateInplace(dvalue, "dependencies/"+escape(dname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
case []string:
|
||||
for i, pname := range dvalue {
|
||||
if _, ok := v[pname]; !ok {
|
||||
errors = append(errors, validationError("dependencies/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for dname, dvalue := range s.DependentRequired {
|
||||
if _, ok := v[dname]; ok {
|
||||
for i, pname := range dvalue {
|
||||
if _, ok := v[pname]; !ok {
|
||||
errors = append(errors, validationError("dependentRequired/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for dname, sch := range s.DependentSchemas {
|
||||
if _, ok := v[dname]; ok {
|
||||
if err := validateInplace(sch, "dependentSchemas/"+escape(dname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case []interface{}:
|
||||
if s.MinItems != -1 && len(v) < s.MinItems {
|
||||
errors = append(errors, validationError("minItems", "minimum %d items required, but found %d items", s.MinItems, len(v)))
|
||||
}
|
||||
if s.MaxItems != -1 && len(v) > s.MaxItems {
|
||||
errors = append(errors, validationError("maxItems", "maximum %d items required, but found %d items", s.MaxItems, len(v)))
|
||||
}
|
||||
if s.UniqueItems {
|
||||
for i := 1; i < len(v); i++ {
|
||||
for j := 0; j < i; j++ {
|
||||
if equals(v[i], v[j]) {
|
||||
errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// items + additionalItems
|
||||
switch items := s.Items.(type) {
|
||||
case *Schema:
|
||||
for i, item := range v {
|
||||
if err := validate(items, "items", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
result.unevalItems = nil
|
||||
case []*Schema:
|
||||
for i, item := range v {
|
||||
if i < len(items) {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(items[i], "items/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else if sch, ok := s.AdditionalItems.(*Schema); ok {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(sch, "additionalItems", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if additionalItems, ok := s.AdditionalItems.(bool); ok {
|
||||
if additionalItems {
|
||||
result.unevalItems = nil
|
||||
} else if len(v) > len(items) {
|
||||
errors = append(errors, validationError("additionalItems", "only %d items are allowed, but found %d items", len(items), len(v)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// prefixItems + items
|
||||
for i, item := range v {
|
||||
if i < len(s.PrefixItems) {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(s.PrefixItems[i], "prefixItems/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else if s.Items2020 != nil {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(s.Items2020, "items", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// contains + minContains + maxContains
|
||||
if s.Contains != nil && (s.MinContains != -1 || s.MaxContains != -1) {
|
||||
matched := 0
|
||||
var causes []error
|
||||
for i, item := range v {
|
||||
if err := validate(s.Contains, "contains", item, strconv.Itoa(i)); err != nil {
|
||||
causes = append(causes, err)
|
||||
} else {
|
||||
matched++
|
||||
if s.ContainsEval {
|
||||
delete(result.unevalItems, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
if s.MinContains != -1 && matched < s.MinContains {
|
||||
errors = append(errors, validationError("minContains", "valid must be >= %d, but got %d", s.MinContains, matched).add(causes...))
|
||||
}
|
||||
if s.MaxContains != -1 && matched > s.MaxContains {
|
||||
errors = append(errors, validationError("maxContains", "valid must be <= %d, but got %d", s.MaxContains, matched))
|
||||
}
|
||||
}
|
||||
|
||||
case string:
|
||||
// minLength + maxLength
|
||||
if s.MinLength != -1 || s.MaxLength != -1 {
|
||||
length := utf8.RuneCount([]byte(v))
|
||||
if s.MinLength != -1 && length < s.MinLength {
|
||||
errors = append(errors, validationError("minLength", "length must be >= %d, but got %d", s.MinLength, length))
|
||||
}
|
||||
if s.MaxLength != -1 && length > s.MaxLength {
|
||||
errors = append(errors, validationError("maxLength", "length must be <= %d, but got %d", s.MaxLength, length))
|
||||
}
|
||||
}
|
||||
|
||||
if s.Pattern != nil && !s.Pattern.MatchString(v) {
|
||||
errors = append(errors, validationError("pattern", "does not match pattern %s", quote(s.Pattern.String())))
|
||||
}
|
||||
|
||||
// contentEncoding + contentMediaType
|
||||
if s.decoder != nil || s.mediaType != nil {
|
||||
decoded := s.ContentEncoding == ""
|
||||
var content []byte
|
||||
if s.decoder != nil {
|
||||
b, err := s.decoder(v)
|
||||
if err != nil {
|
||||
errors = append(errors, validationError("contentEncoding", "value is not %s encoded", s.ContentEncoding))
|
||||
} else {
|
||||
content, decoded = b, true
|
||||
}
|
||||
}
|
||||
if decoded && s.mediaType != nil {
|
||||
if s.decoder == nil {
|
||||
content = []byte(v)
|
||||
}
|
||||
if err := s.mediaType(content); err != nil {
|
||||
errors = append(errors, validationError("contentMediaType", "value is not of mediatype %s", quote(s.ContentMediaType)))
|
||||
}
|
||||
}
|
||||
if decoded && s.ContentSchema != nil {
|
||||
contentJSON, err := unmarshal(bytes.NewReader(content))
|
||||
if err != nil {
|
||||
errors = append(errors, validationError("contentSchema", "value is not valid json"))
|
||||
} else {
|
||||
err := validate(s.ContentSchema, "contentSchema", contentJSON, "")
|
||||
if err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
|
||||
// lazy convert to *big.Rat to avoid allocation
|
||||
var numVal *big.Rat
|
||||
num := func() *big.Rat {
|
||||
if numVal == nil {
|
||||
numVal, _ = new(big.Rat).SetString(fmt.Sprint(v))
|
||||
}
|
||||
return numVal
|
||||
}
|
||||
f64 := func(r *big.Rat) float64 {
|
||||
f, _ := r.Float64()
|
||||
return f
|
||||
}
|
||||
if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
|
||||
errors = append(errors, validationError("minimum", "must be >= %v but found %v", f64(s.Minimum), v))
|
||||
}
|
||||
if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
|
||||
errors = append(errors, validationError("exclusiveMinimum", "must be > %v but found %v", f64(s.ExclusiveMinimum), v))
|
||||
}
|
||||
if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
|
||||
errors = append(errors, validationError("maximum", "must be <= %v but found %v", f64(s.Maximum), v))
|
||||
}
|
||||
if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
|
||||
errors = append(errors, validationError("exclusiveMaximum", "must be < %v but found %v", f64(s.ExclusiveMaximum), v))
|
||||
}
|
||||
if s.MultipleOf != nil {
|
||||
if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
|
||||
errors = append(errors, validationError("multipleOf", "%v not multipleOf %v", v, f64(s.MultipleOf)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// $ref + $recursiveRef + $dynamicRef
|
||||
validateRef := func(sch *Schema, refPath string) error {
|
||||
if sch != nil {
|
||||
if err := validateInplace(sch, refPath); err != nil {
|
||||
var url = sch.Location
|
||||
if s.url() == sch.url() {
|
||||
url = sch.loc()
|
||||
}
|
||||
return validationError(refPath, "doesn't validate with %s", quote(url)).causes(err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := validateRef(s.Ref, "$ref"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
if s.RecursiveRef != nil {
|
||||
sch := s.RecursiveRef
|
||||
if sch.RecursiveAnchor {
|
||||
// recursiveRef based on scope
|
||||
for _, e := range scope {
|
||||
if e.schema.RecursiveAnchor {
|
||||
sch = e.schema
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := validateRef(sch, "$recursiveRef"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
if s.DynamicRef != nil {
|
||||
sch := s.DynamicRef
|
||||
if sch.DynamicAnchor != "" {
|
||||
// dynamicRef based on scope
|
||||
for i := len(scope) - 1; i >= 0; i-- {
|
||||
sr := scope[i]
|
||||
if sr.discard {
|
||||
break
|
||||
}
|
||||
for _, da := range sr.schema.dynamicAnchors {
|
||||
if da.DynamicAnchor == s.DynamicRef.DynamicAnchor && da != s.DynamicRef {
|
||||
sch = da
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := validateRef(sch, "$dynamicRef"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
|
||||
if s.Not != nil && validateInplace(s.Not, "not") == nil {
|
||||
errors = append(errors, validationError("not", "not failed"))
|
||||
}
|
||||
|
||||
for i, sch := range s.AllOf {
|
||||
schPath := "allOf/" + strconv.Itoa(i)
|
||||
if err := validateInplace(sch, schPath); err != nil {
|
||||
errors = append(errors, validationError(schPath, "allOf failed").add(err))
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.AnyOf) > 0 {
|
||||
matched := false
|
||||
var causes []error
|
||||
for i, sch := range s.AnyOf {
|
||||
if err := validateInplace(sch, "anyOf/"+strconv.Itoa(i)); err == nil {
|
||||
matched = true
|
||||
} else {
|
||||
causes = append(causes, err)
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
errors = append(errors, validationError("anyOf", "anyOf failed").add(causes...))
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.OneOf) > 0 {
|
||||
matched := -1
|
||||
var causes []error
|
||||
for i, sch := range s.OneOf {
|
||||
if err := validateInplace(sch, "oneOf/"+strconv.Itoa(i)); err == nil {
|
||||
if matched == -1 {
|
||||
matched = i
|
||||
} else {
|
||||
errors = append(errors, validationError("oneOf", "valid against schemas at indexes %d and %d", matched, i))
|
||||
break
|
||||
}
|
||||
} else {
|
||||
causes = append(causes, err)
|
||||
}
|
||||
}
|
||||
if matched == -1 {
|
||||
errors = append(errors, validationError("oneOf", "oneOf failed").add(causes...))
|
||||
}
|
||||
}
|
||||
|
||||
// if + then + else
|
||||
if s.If != nil {
|
||||
err := validateInplace(s.If, "if")
|
||||
// "if" leaves dynamic scope
|
||||
scope[len(scope)-1].discard = true
|
||||
if err == nil {
|
||||
if s.Then != nil {
|
||||
if err := validateInplace(s.Then, "then"); err != nil {
|
||||
errors = append(errors, validationError("then", "if-then failed").add(err))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if s.Else != nil {
|
||||
if err := validateInplace(s.Else, "else"); err != nil {
|
||||
errors = append(errors, validationError("else", "if-else failed").add(err))
|
||||
}
|
||||
}
|
||||
}
|
||||
// restore dynamic scope
|
||||
scope[len(scope)-1].discard = false
|
||||
}
|
||||
|
||||
for _, ext := range s.Extensions {
|
||||
if err := ext.Validate(ValidationContext{result, validate, validateInplace, validationError}, v); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
|
||||
// UnevaluatedProperties + UnevaluatedItems
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
if s.UnevaluatedProperties != nil {
|
||||
for pname := range result.unevalProps {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
if err := validate(s.UnevaluatedProperties, "UnevaluatedProperties", pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
result.unevalProps = nil
|
||||
}
|
||||
case []interface{}:
|
||||
if s.UnevaluatedItems != nil {
|
||||
for i := range result.unevalItems {
|
||||
if err := validate(s.UnevaluatedItems, "UnevaluatedItems", v[i], strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
result.unevalItems = nil
|
||||
}
|
||||
}
|
||||
|
||||
switch len(errors) {
|
||||
case 0:
|
||||
return result, nil
|
||||
case 1:
|
||||
return result, errors[0]
|
||||
default:
|
||||
return result, validationError("", "").add(errors...) // empty message, used just for wrapping
|
||||
}
|
||||
}
|
||||
|
||||
type validationResult struct {
|
||||
unevalProps map[string]struct{}
|
||||
unevalItems map[int]struct{}
|
||||
}
|
||||
|
||||
func (vr validationResult) unevalPnames() string {
|
||||
pnames := make([]string, 0, len(vr.unevalProps))
|
||||
for pname := range vr.unevalProps {
|
||||
pnames = append(pnames, quote(pname))
|
||||
}
|
||||
return strings.Join(pnames, ", ")
|
||||
}
|
||||
|
||||
// jsonType returns the json type of given value v.
|
||||
//
|
||||
// It panics if the given value is not valid json value
|
||||
func jsonType(v interface{}) string {
|
||||
switch v.(type) {
|
||||
case nil:
|
||||
return "null"
|
||||
case bool:
|
||||
return "boolean"
|
||||
case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
|
||||
return "number"
|
||||
case string:
|
||||
return "string"
|
||||
case []interface{}:
|
||||
return "array"
|
||||
case map[string]interface{}:
|
||||
return "object"
|
||||
}
|
||||
panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
|
||||
}
|
||||
|
||||
// equals tells if given two json values are equal or not.
|
||||
func equals(v1, v2 interface{}) bool {
|
||||
v1Type := jsonType(v1)
|
||||
if v1Type != jsonType(v2) {
|
||||
return false
|
||||
}
|
||||
switch v1Type {
|
||||
case "array":
|
||||
arr1, arr2 := v1.([]interface{}), v2.([]interface{})
|
||||
if len(arr1) != len(arr2) {
|
||||
return false
|
||||
}
|
||||
for i := range arr1 {
|
||||
if !equals(arr1[i], arr2[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case "object":
|
||||
obj1, obj2 := v1.(map[string]interface{}), v2.(map[string]interface{})
|
||||
if len(obj1) != len(obj2) {
|
||||
return false
|
||||
}
|
||||
for k, v1 := range obj1 {
|
||||
if v2, ok := obj2[k]; ok {
|
||||
if !equals(v1, v2) {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case "number":
|
||||
num1, _ := new(big.Rat).SetString(fmt.Sprint(v1))
|
||||
num2, _ := new(big.Rat).SetString(fmt.Sprint(v2))
|
||||
return num1.Cmp(num2) == 0
|
||||
default:
|
||||
return v1 == v2
|
||||
}
|
||||
}
|
||||
|
||||
// escape converts the given token into a valid, URL-safe json-pointer
// reference token: "~" becomes "~0", "/" becomes "~1" (RFC 6901), and the
// result is then percent-encoded as a URL path segment.
func escape(token string) string {
	// Single-pass replacement is equivalent to the two sequential
	// ReplaceAll calls: neither "~0" nor "~1" can re-trigger a match,
	// since a Replacer never rescans replaced text.
	ptrEscaped := strings.NewReplacer("~", "~0", "/", "~1").Replace(token)
	return url.PathEscape(ptrEscaped)
}
|
||||
7
vendor/github.com/xeipuuv/gojsonschema/go.mod
generated
vendored
7
vendor/github.com/xeipuuv/gojsonschema/go.mod
generated
vendored
|
|
@ -1,7 +0,0 @@
|
|||
module github.com/xeipuuv/gojsonschema
|
||||
|
||||
require (
|
||||
github.com/stretchr/testify v1.3.0
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f // indirect
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415
|
||||
)
|
||||
11
vendor/github.com/xeipuuv/gojsonschema/go.sum
generated
vendored
11
vendor/github.com/xeipuuv/gojsonschema/go.sum
generated
vendored
|
|
@ -1,11 +0,0 @@
|
|||
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
|
||||
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
|
||||
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
|
||||
5
vendor/gopkg.in/yaml.v2/go.mod
generated
vendored
5
vendor/gopkg.in/yaml.v2/go.mod
generated
vendored
|
|
@ -1,5 +0,0 @@
|
|||
module gopkg.in/yaml.v2
|
||||
|
||||
go 1.15
|
||||
|
||||
require gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405
|
||||
10
vendor/modules.txt
vendored
10
vendor/modules.txt
vendored
|
|
@ -1,6 +1,6 @@
|
|||
# github.com/beevik/etree v1.1.0
|
||||
## explicit
|
||||
github.com/beevik/etree
|
||||
# github.com/santhosh-tekuri/jsonschema/v5 v5.1.1
|
||||
## explicit; go 1.15
|
||||
github.com/santhosh-tekuri/jsonschema/v5
|
||||
# github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb
|
||||
## explicit
|
||||
github.com/xeipuuv/gojsonpointer
|
||||
|
|
@ -11,8 +11,8 @@ github.com/xeipuuv/gojsonreference
|
|||
## explicit
|
||||
github.com/xeipuuv/gojsonschema
|
||||
# gopkg.in/yaml.v2 v2.4.0
|
||||
## explicit
|
||||
## explicit; go 1.15
|
||||
gopkg.in/yaml.v2
|
||||
# sigs.k8s.io/yaml v1.2.0
|
||||
## explicit
|
||||
## explicit; go 1.12
|
||||
sigs.k8s.io/yaml
|
||||
|
|
|
|||
8
vendor/sigs.k8s.io/yaml/go.mod
generated
vendored
8
vendor/sigs.k8s.io/yaml/go.mod
generated
vendored
|
|
@ -1,8 +0,0 @@
|
|||
module sigs.k8s.io/yaml
|
||||
|
||||
go 1.12
|
||||
|
||||
require (
|
||||
github.com/davecgh/go-spew v1.1.1
|
||||
gopkg.in/yaml.v2 v2.2.8
|
||||
)
|
||||
9
vendor/sigs.k8s.io/yaml/go.sum
generated
vendored
9
vendor/sigs.k8s.io/yaml/go.sum
generated
vendored
|
|
@ -1,9 +0,0 @@
|
|||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
|
||||
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
Loading…
Reference in a new issue