mirror of
https://github.com/yannh/kubeconform.git
synced 2026-05-06 10:14:27 +00:00
WIP - upgrade jsonschema to v6
This commit is contained in:
parent
1bd44986dd
commit
35718141ea
113 changed files with 24459 additions and 4755 deletions
4
vendor/github.com/santhosh-tekuri/jsonschema/v6/.gitmodules
generated
vendored
Normal file
4
vendor/github.com/santhosh-tekuri/jsonschema/v6/.gitmodules
generated
vendored
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
[submodule "testdata/JSON-Schema-Test-Suite"]
|
||||
path = testdata/JSON-Schema-Test-Suite
|
||||
url = https://github.com/json-schema-org/JSON-Schema-Test-Suite.git
|
||||
branch = main
|
||||
5
vendor/github.com/santhosh-tekuri/jsonschema/v6/.golangci.yml
generated
vendored
Normal file
5
vendor/github.com/santhosh-tekuri/jsonschema/v6/.golangci.yml
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
linters:
|
||||
enable:
|
||||
- nakedret
|
||||
- errname
|
||||
- godot
|
||||
7
vendor/github.com/santhosh-tekuri/jsonschema/v6/.pre-commit-hooks.yaml
generated
vendored
Normal file
7
vendor/github.com/santhosh-tekuri/jsonschema/v6/.pre-commit-hooks.yaml
generated
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
- id: jsonschema-validate
|
||||
name: Validate JSON against JSON Schema
|
||||
description: ensure json files follow specified JSON Schema
|
||||
entry: jv
|
||||
language: golang
|
||||
additional_dependencies:
|
||||
- ./cmd/jv
|
||||
175
vendor/github.com/santhosh-tekuri/jsonschema/v6/LICENSE
generated
vendored
Normal file
175
vendor/github.com/santhosh-tekuri/jsonschema/v6/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
86
vendor/github.com/santhosh-tekuri/jsonschema/v6/README.md
generated
vendored
Normal file
86
vendor/github.com/santhosh-tekuri/jsonschema/v6/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
# jsonschema v6.0.0
|
||||
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v6)
|
||||
[](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v6)
|
||||
[](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
|
||||
[](https://codecov.io/gh/santhosh-tekuri/jsonschema/tree/boon)
|
||||
|
||||
see [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v6) for examples
|
||||
|
||||
## Library Features
|
||||
|
||||
- [x] pass [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite) excluding optional(compare with other impls at [bowtie](https://bowtie-json-schema.github.io/bowtie/#))
|
||||
- [x] [](https://bowtie.report/#/dialects/draft4)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft6)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft7)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft2019-09)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft2020-12)
|
||||
- [x] detect infinite loop traps
|
||||
- [x] `$schema` cycle
|
||||
- [x] validation cycle
|
||||
- [x] custom `$schema` url
|
||||
- [x] vocabulary based validation
|
||||
- [x] custom regex engine
|
||||
- [x] format assertions
|
||||
- [x] flag to enable in draft >= 2019-09
|
||||
- [x] custom format registration
|
||||
- [x] built-in formats
|
||||
- [x] regex, uuid
|
||||
- [x] ipv4, ipv6
|
||||
- [x] hostname, email
|
||||
- [x] date, time, date-time, duration
|
||||
- [x] json-pointer, relative-json-pointer
|
||||
- [x] uri, uri-reference, uri-template
|
||||
- [x] iri, iri-reference
|
||||
- [x] period, semver
|
||||
- [x] content assertions
|
||||
- [x] flag to enable in draft >= 7
|
||||
- [x] contentEncoding
|
||||
- [x] base64
|
||||
- [x] custom
|
||||
- [x] contentMediaType
|
||||
- [x] application/json
|
||||
- [x] custom
|
||||
- [x] contentSchema
|
||||
- [x] errors
|
||||
- [x] introspectable
|
||||
- [x] hierarchy
|
||||
- [x] alternative display with `#`
|
||||
- [x] output
|
||||
- [x] flag
|
||||
- [x] basic
|
||||
- [x] detailed
|
||||
- [x] custom vocabulary
|
||||
- enable via `$vocabulary` for draft >= 2019-09
|
||||
- enable via flag for draft <= 7
|
||||
- [x] mixed dialect support
|
||||
|
||||
## CLI
|
||||
|
||||
to install: `go install github.com/santhosh-tekuri/jsonschema/cmd/jv@latest`
|
||||
|
||||
```
|
||||
Usage: jv [OPTIONS] SCHEMA [INSTANCE...]
|
||||
|
||||
Options:
|
||||
-c, --assert-content Enable content assertions with draft >= 7
|
||||
-f, --assert-format Enable format assertions with draft >= 2019
|
||||
--cacert pem-file Use the specified pem-file to verify the peer. The file may contain multiple CA certificates
|
||||
-d, --draft version Draft version used when '$schema' is missing. Valid values 4, 6, 7, 2019, 2020 (default 2020)
|
||||
-h, --help Print help information
|
||||
-k, --insecure Use insecure TLS connection
|
||||
-o, --output format Output format. Valid values simple, alt, flag, basic, detailed (default "simple")
|
||||
-q, --quiet Do not print errors
|
||||
-v, --version Print build information
|
||||
```
|
||||
|
||||
- [x] exit code `1` for validation errors, `2` for usage errors
|
||||
- [x] validate both schema and multiple instances
|
||||
- [x] support both json and yaml files
|
||||
- [x] support standard input, use `-`
|
||||
- [x] quiet mode with parsable output
|
||||
- [x] http(s) url support
|
||||
- [x] custom certs for validation, use `--cacert`
|
||||
- [x] flag to skip certificate verification, use `--insecure`
|
||||
|
||||
332
vendor/github.com/santhosh-tekuri/jsonschema/v6/compiler.go
generated
vendored
Normal file
332
vendor/github.com/santhosh-tekuri/jsonschema/v6/compiler.go
generated
vendored
Normal file
|
|
@ -0,0 +1,332 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"slices"
|
||||
)
|
||||
|
||||
// Compiler compiles json schema into *Schema.
type Compiler struct {
	schemas       map[urlPtr]*Schema    // cache of compiled schemas, keyed by url + json-pointer
	roots         *roots                // loaded schema resources and compile-time settings
	formats       map[string]*Format    // custom formats registered via RegisterFormat
	decoders      map[string]*Decoder   // custom contentEncoding decoders (RegisterContentEncoding)
	mediaTypes    map[string]*MediaType // custom contentMediaType validators (RegisterContentMediaType)
	assertFormat  bool                  // force format assertions; see AssertFormat
	assertContent bool                  // enable content assertions; see AssertContent
}
|
||||
|
||||
// NewCompiler create Compiler Object.
|
||||
func NewCompiler() *Compiler {
|
||||
return &Compiler{
|
||||
schemas: map[urlPtr]*Schema{},
|
||||
roots: newRoots(),
|
||||
formats: map[string]*Format{},
|
||||
decoders: map[string]*Decoder{},
|
||||
mediaTypes: map[string]*MediaType{},
|
||||
assertFormat: false,
|
||||
assertContent: false,
|
||||
}
|
||||
}
|
||||
|
||||
// DefaultDraft overrides the draft used to
// compile schemas without `$schema` field.
//
// By default, this library uses the latest
// draft supported.
//
// The use of this option is HIGHLY encouraged
// to ensure continued correct operation of your
// schema. The current default value will not stay
// the same over time.
func (c *Compiler) DefaultDraft(d *Draft) {
	c.roots.defaultDraft = d
}
|
||||
|
||||
// AssertFormat always enables format assertions.
//
// Default Behavior:
// for draft-07: enabled.
// for draft/2019-09: disabled unless metaschema says `format` vocabulary is required.
// for draft/2020-12: disabled unless metaschema says `format-assertion` vocabulary is required.
//
// See also [Compiler.RegisterFormat] for registering custom formats.
func (c *Compiler) AssertFormat() {
	c.assertFormat = true
}
|
||||
|
||||
// AssertContent enables content assertions.
//
// Content assertions include keywords:
//   - contentEncoding
//   - contentMediaType
//   - contentSchema
//
// Default behavior is always disabled.
//
// See also [Compiler.RegisterContentEncoding] and
// [Compiler.RegisterContentMediaType].
func (c *Compiler) AssertContent() {
	c.assertContent = true
}
|
||||
|
||||
// RegisterFormat registers custom format.
|
||||
//
|
||||
// NOTE:
|
||||
// - "regex" format can not be overridden
|
||||
// - format assertions are disabled for draft >= 2019-09
|
||||
// see [Compiler.AssertFormat]
|
||||
func (c *Compiler) RegisterFormat(f *Format) {
|
||||
if f.Name != "regex" {
|
||||
c.formats[f.Name] = f
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterContentEncoding registers custom contentEncoding.
//
// Registering a decoder whose Name is already registered replaces it.
//
// NOTE: content assertions are disabled by default.
// see [Compiler.AssertContent].
func (c *Compiler) RegisterContentEncoding(d *Decoder) {
	c.decoders[d.Name] = d
}
|
||||
|
||||
// RegisterContentMediaType registers custom contentMediaType.
//
// Registering a media type whose Name is already registered replaces it.
//
// NOTE: content assertions are disabled by default.
// see [Compiler.AssertContent].
func (c *Compiler) RegisterContentMediaType(mt *MediaType) {
	c.mediaTypes[mt.Name] = mt
}
|
||||
|
||||
// RegisterVocabulary registers custom vocabulary.
//
// Registering a vocabulary whose URL is already registered replaces it.
//
// NOTE:
//   - vocabularies are disabled for draft >= 2019-09
//     see [Compiler.AssertVocabs]
func (c *Compiler) RegisterVocabulary(vocab *Vocabulary) {
	c.roots.vocabularies[vocab.URL] = vocab
}
|
||||
|
||||
// AssertVocabs always enables user-defined vocabularies assertions.
//
// Default Behavior:
// for draft-07: enabled.
// for draft/2019-09: disabled unless metaschema enables a vocabulary.
// for draft/2020-12: disabled unless metaschema enables a vocabulary.
//
// See also [Compiler.RegisterVocabulary].
func (c *Compiler) AssertVocabs() {
	c.roots.assertVocabs = true
}
|
||||
|
||||
// AddResource adds schema resource which gets used later in reference
|
||||
// resolution.
|
||||
//
|
||||
// The argument url can be file path or url. Any fragment in url is ignored.
|
||||
// The argument doc must be valid json value.
|
||||
func (c *Compiler) AddResource(url string, doc any) error {
|
||||
uf, err := absolute(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if isMeta(string(uf.url)) {
|
||||
return &ResourceExistsError{string(uf.url)}
|
||||
}
|
||||
if !c.roots.loader.add(uf.url, doc) {
|
||||
return &ResourceExistsError{string(uf.url)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UseLoader overrides the default [URLLoader] used
// to load schema resources.
//
// NOTE(review): presumably this should be called before compiling any
// schemas, like [Compiler.UseRegexpEngine] — confirm against upstream docs.
func (c *Compiler) UseLoader(loader URLLoader) {
	c.roots.loader.loader = loader
}
|
||||
|
||||
// UseRegexpEngine changes the regexp-engine used.
|
||||
// By default it uses regexp package from go standard
|
||||
// library.
|
||||
//
|
||||
// NOTE: must be called before compiling any schemas.
|
||||
func (c *Compiler) UseRegexpEngine(engine RegexpEngine) {
|
||||
if engine == nil {
|
||||
engine = goRegexpCompile
|
||||
}
|
||||
c.roots.regexpEngine = engine
|
||||
}
|
||||
|
||||
func (c *Compiler) enqueue(q *queue, up urlPtr) *Schema {
|
||||
if sch, ok := c.schemas[up]; ok {
|
||||
// already got compiled
|
||||
return sch
|
||||
}
|
||||
if sch := q.get(up); sch != nil {
|
||||
return sch
|
||||
}
|
||||
sch := newSchema(up)
|
||||
q.append(sch)
|
||||
return sch
|
||||
}
|
||||
|
||||
// MustCompile is like [Compile] but panics if compilation fails.
|
||||
// It simplifies safe initialization of global variables holding
|
||||
// compiled schema.
|
||||
func (c *Compiler) MustCompile(loc string) *Schema {
|
||||
sch, err := c.Compile(loc)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("jsonschema: Compile(%q): %v", loc, err))
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
// Compile compiles json-schema at given loc.
|
||||
func (c *Compiler) Compile(loc string) (*Schema, error) {
|
||||
uf, err := absolute(loc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
up, err := c.roots.resolveFragment(*uf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.doCompile(up)
|
||||
}
|
||||
|
||||
// doCompile compiles the schema at up together with every schema it
// transitively references, and returns the compiled schema for up.
func (c *Compiler) doCompile(up urlPtr) (*Schema, error) {
	q := &queue{}
	compiled := 0 // index of the next queued schema to compile

	c.enqueue(q, up)
	// compileValue may enqueue additional schemas, so loop until the
	// queue stops growing.
	for q.len() > compiled {
		sch := q.at(compiled)
		if err := c.roots.ensureSubschema(sch.up); err != nil {
			return nil, err
		}
		r := c.roots.roots[sch.up.url]
		v, err := sch.up.lookup(r.doc)
		if err != nil {
			return nil, err
		}
		if err := c.compileValue(v, sch, r, q); err != nil {
			return nil, err
		}
		compiled++
	}
	// publish into the cache only after the whole batch compiled
	// successfully, so a failed compile leaves no partial entries.
	for _, sch := range *q {
		c.schemas[sch.up] = sch
	}
	return c.schemas[up], nil
}
|
||||
|
||||
// compileValue compiles the json value v (a bool or an object) into sch,
// enqueuing any referenced schemas onto q for later compilation.
func (c *Compiler) compileValue(v any, sch *Schema, r *root, q *queue) error {
	res := r.resource(sch.up.ptr)
	sch.DraftVersion = res.dialect.draft.version

	// link sch to its enclosing resource schema.
	base := urlPtr{sch.up.url, res.ptr}
	sch.resource = c.enqueue(q, base)

	// if resource, enqueue dynamic anchors for compilation
	if sch.DraftVersion >= 2020 && sch.up == sch.resource.up {
		res := r.resource(sch.up.ptr)
		for anchor, anchorPtr := range res.anchors {
			if slices.Contains(res.dynamicAnchors, anchor) {
				up := urlPtr{sch.up.url, anchorPtr}
				danchorSch := c.enqueue(q, up)
				if sch.dynamicAnchors == nil {
					sch.dynamicAnchors = map[string]*Schema{}
				}
				sch.dynamicAnchors[string(anchor)] = danchorSch
			}
		}
	}

	// a schema is either a boolean or an object of keywords.
	switch v := v.(type) {
	case bool:
		sch.Bool = &v
	case map[string]any:
		if err := c.compileObject(v, sch, r, q); err != nil {
			return err
		}
	}

	// precompute evaluation bookkeeping used by unevaluatedProperties/
	// unevaluatedItems. items/additionalItems apply before draft 2020-12;
	// prefixItems/items (Items2020) apply from 2020-12 on.
	sch.allPropsEvaluated = sch.AdditionalProperties != nil
	if sch.DraftVersion < 2020 {
		sch.allItemsEvaluated = sch.AdditionalItems != nil
		switch items := sch.Items.(type) {
		case *Schema:
			sch.allItemsEvaluated = true
		case []*Schema:
			sch.numItemsEvaluated = len(items)
		}
	} else {
		sch.allItemsEvaluated = sch.Items2020 != nil
		sch.numItemsEvaluated = len(sch.PrefixItems)
	}

	return nil
}
|
||||
|
||||
func (c *Compiler) compileObject(obj map[string]any, sch *Schema, r *root, q *queue) error {
|
||||
if len(obj) == 0 {
|
||||
b := true
|
||||
sch.Bool = &b
|
||||
return nil
|
||||
}
|
||||
oc := objCompiler{
|
||||
c: c,
|
||||
obj: obj,
|
||||
up: sch.up,
|
||||
r: r,
|
||||
res: r.resource(sch.up.ptr),
|
||||
q: q,
|
||||
}
|
||||
return oc.compile(sch)
|
||||
}
|
||||
|
||||
// queue --
|
||||
|
||||
type queue []*Schema
|
||||
|
||||
func (q *queue) append(sch *Schema) {
|
||||
*q = append(*q, sch)
|
||||
}
|
||||
|
||||
func (q *queue) at(i int) *Schema {
|
||||
return (*q)[i]
|
||||
}
|
||||
|
||||
func (q *queue) len() int {
|
||||
return len(*q)
|
||||
}
|
||||
|
||||
func (q *queue) get(up urlPtr) *Schema {
|
||||
i := slices.IndexFunc(*q, func(sch *Schema) bool { return sch.up == up })
|
||||
if i != -1 {
|
||||
return (*q)[i]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// regexp --

// Regexp is the representation of compiled regular expression.
type Regexp interface {
	fmt.Stringer

	// MatchString reports whether the string s contains
	// any match of the regular expression.
	MatchString(string) bool
}

// RegexpEngine parses a regular expression and returns,
// if successful, a Regexp object that can be used to
// match against text.
type RegexpEngine func(string) (Regexp, error)

// validate returns an error if v is a string that the engine
// cannot compile; non-string values are accepted as-is.
func (re RegexpEngine) validate(v any) error {
	if s, ok := v.(string); ok {
		_, err := re(s)
		return err
	}
	return nil
}

// goRegexpCompile is the default RegexpEngine, backed by the
// standard library regexp package.
func goRegexpCompile(s string) (Regexp, error) {
	return regexp.Compile(s)
}
|
||||
51
vendor/github.com/santhosh-tekuri/jsonschema/v6/content.go
generated
vendored
Normal file
51
vendor/github.com/santhosh-tekuri/jsonschema/v6/content.go
generated
vendored
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// Decoder specifies how to decode specific contentEncoding.
type Decoder struct {
	// Name of contentEncoding.
	Name string
	// Decode given string to byte array.
	Decode func(string) ([]byte, error)
}

// decoders holds the built-in contentEncoding decoders.
// Currently only "base64" (standard encoding with padding) is built in.
var decoders = map[string]*Decoder{
	"base64": {
		Name: "base64",
		Decode: func(s string) ([]byte, error) {
			return base64.StdEncoding.DecodeString(s)
		},
	},
}
|
||||
|
||||
// MediaType specifies how to validate bytes against specific contentMediaType.
type MediaType struct {
	// Name of contentMediaType.
	Name string

	// Validate checks whether bytes conform to this mediatype.
	Validate func([]byte) error

	// UnmarshalJSON unmarshals bytes into json value.
	// This must be nil if this mediatype is not compatible
	// with json.
	UnmarshalJSON func([]byte) (any, error)
}

// mediaTypes holds the built-in contentMediaType validators.
// Currently only "application/json" is built in.
var mediaTypes = map[string]*MediaType{
	"application/json": {
		Name: "application/json",
		Validate: func(b []byte) error {
			var v any
			return json.Unmarshal(b, &v)
		},
		UnmarshalJSON: func(b []byte) (any, error) {
			return UnmarshalJSON(bytes.NewReader(b))
		},
	},
}
||||
360
vendor/github.com/santhosh-tekuri/jsonschema/v6/draft.go
generated
vendored
Normal file
360
vendor/github.com/santhosh-tekuri/jsonschema/v6/draft.go
generated
vendored
Normal file
|
|
@ -0,0 +1,360 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Draft represents json-schema specification.
type Draft struct {
	version    int
	url        string
	sch        *Schema      // compiled metaschema (populated in init)
	id         string       // property name used to represent id
	subschemas []SchemaPath // locations of subschemas
	vocabPrefix string             // prefix used for vocabulary
	allVocabs   map[string]*Schema // names of supported vocabs with its schemas
	defaultVocabs []string // names of default vocabs
}

// String returns the specification url.
func (d *Draft) String() string {
	return d.url
}
|
||||
|
||||
var (
	// Draft4 represents json-schema specification draft-04.
	Draft4 = &Draft{
		version: 4,
		url:     "http://json-schema.org/draft-04/schema",
		id:      "id",
		subschemas: []SchemaPath{
			// type agnostic
			schemaPath("definitions/*"),
			schemaPath("not"),
			schemaPath("allOf/[]"),
			schemaPath("anyOf/[]"),
			schemaPath("oneOf/[]"),
			// object
			schemaPath("properties/*"),
			schemaPath("additionalProperties"),
			schemaPath("patternProperties/*"),
			// array
			schemaPath("items"),
			schemaPath("items/[]"),
			schemaPath("additionalItems"),
			schemaPath("dependencies/*"),
		},
		vocabPrefix:   "",
		allVocabs:     map[string]*Schema{},
		defaultVocabs: []string{},
	}

	// Draft6 represents json-schema specification draft-06.
	// Subschema locations extend those of Draft4.
	Draft6 = &Draft{
		version: 6,
		url:     "http://json-schema.org/draft-06/schema",
		id:      "$id",
		subschemas: joinSubschemas(Draft4.subschemas,
			schemaPath("propertyNames"),
			schemaPath("contains"),
		),
		vocabPrefix:   "",
		allVocabs:     map[string]*Schema{},
		defaultVocabs: []string{},
	}

	// Draft7 represents json-schema specification draft-07.
	// Subschema locations extend those of Draft6.
	Draft7 = &Draft{
		version: 7,
		url:     "http://json-schema.org/draft-07/schema",
		id:      "$id",
		subschemas: joinSubschemas(Draft6.subschemas,
			schemaPath("if"),
			schemaPath("then"),
			schemaPath("else"),
		),
		vocabPrefix:   "",
		allVocabs:     map[string]*Schema{},
		defaultVocabs: []string{},
	}

	// Draft2019 represents json-schema specification draft/2019-09,
	// the first draft with vocabulary support.
	Draft2019 = &Draft{
		version: 2019,
		url:     "https://json-schema.org/draft/2019-09/schema",
		id:      "$id",
		subschemas: joinSubschemas(Draft7.subschemas,
			schemaPath("$defs/*"),
			schemaPath("dependentSchemas/*"),
			schemaPath("unevaluatedProperties"),
			schemaPath("unevaluatedItems"),
			schemaPath("contentSchema"),
		),
		vocabPrefix: "https://json-schema.org/draft/2019-09/vocab/",
		// vocab schemas are nil here; compiled in init below.
		allVocabs: map[string]*Schema{
			"core":       nil,
			"applicator": nil,
			"validation": nil,
			"meta-data":  nil,
			"format":     nil,
			"content":    nil,
		},
		defaultVocabs: []string{"core", "applicator", "validation"},
	}

	// Draft2020 represents json-schema specification draft/2020-12.
	Draft2020 = &Draft{
		version: 2020,
		url:     "https://json-schema.org/draft/2020-12/schema",
		id:      "$id",
		subschemas: joinSubschemas(Draft2019.subschemas,
			schemaPath("prefixItems/[]"),
		),
		vocabPrefix: "https://json-schema.org/draft/2020-12/vocab/",
		// vocab schemas are nil here; compiled in init below.
		allVocabs: map[string]*Schema{
			"core":              nil,
			"applicator":        nil,
			"unevaluated":       nil,
			"validation":        nil,
			"meta-data":         nil,
			"format-annotation": nil,
			"format-assertion":  nil,
			"content":           nil,
		},
		defaultVocabs: []string{"core", "applicator", "unevaluated", "validation"},
	}

	// draftLatest is the draft used when `$schema` is absent and
	// no DefaultDraft override is set.
	draftLatest = Draft2020
)
|
||||
|
||||
// init compiles the metaschema and the vocabulary schemas for every
// supported draft.
func init() {
	c := NewCompiler()
	c.AssertFormat()
	for _, d := range []*Draft{Draft4, Draft6, Draft7, Draft2019, Draft2020} {
		d.sch = c.MustCompile(d.url)
		for name := range d.allVocabs {
			// vocab schema lives next to the metaschema, e.g.
			// .../draft/2020-12/meta/<name>.
			d.allVocabs[name] = c.MustCompile(strings.TrimSuffix(d.url, "schema") + "meta/" + name)
		}
	}
}
|
||||
|
||||
func draftFromURL(url string) *Draft {
|
||||
u, frag := split(url)
|
||||
if frag != "" {
|
||||
return nil
|
||||
}
|
||||
u, ok := strings.CutPrefix(u, "http://")
|
||||
if !ok {
|
||||
u, _ = strings.CutPrefix(u, "https://")
|
||||
}
|
||||
switch u {
|
||||
case "json-schema.org/schema":
|
||||
return draftLatest
|
||||
case "json-schema.org/draft/2020-12/schema":
|
||||
return Draft2020
|
||||
case "json-schema.org/draft/2019-09/schema":
|
||||
return Draft2019
|
||||
case "json-schema.org/draft-07/schema":
|
||||
return Draft7
|
||||
case "json-schema.org/draft-06/schema":
|
||||
return Draft6
|
||||
case "json-schema.org/draft-04/schema":
|
||||
return Draft4
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Draft) getID(obj map[string]any) string {
|
||||
if d.version < 2019 {
|
||||
if _, ok := obj["$ref"]; ok {
|
||||
// All other properties in a "$ref" object MUST be ignored
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
id, ok := strVal(obj, d.id)
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
id, _ = split(id) // ignore fragment
|
||||
return id
|
||||
}
|
||||
|
||||
// getVocabs returns the vocabulary names enabled by doc's "$vocabulary"
// keyword. It returns (nil, nil) when the draft predates 2019-09 or when
// doc declares no usable "$vocabulary" object. Only entries whose value
// is true (required) are considered: vocabularies the draft knows are
// recorded by their short name (prefix stripped); any other vocabulary
// must be registered in the user-supplied map, otherwise an
// UnsupportedVocabularyError is returned. "core" is always appended.
func (d *Draft) getVocabs(url url, doc any, vocabularies map[string]*Vocabulary) ([]string, error) {
	if d.version < 2019 {
		return nil, nil
	}
	obj, ok := doc.(map[string]any)
	if !ok {
		return nil, nil
	}
	v, ok := obj["$vocabulary"]
	if !ok {
		return nil, nil
	}
	obj, ok = v.(map[string]any)
	if !ok {
		return nil, nil
	}

	var vocabs []string
	for vocab, reqd := range obj {
		// optional vocabularies (false or non-bool) are skipped
		if reqd, ok := reqd.(bool); !ok || !reqd {
			continue
		}
		name, ok := strings.CutPrefix(vocab, d.vocabPrefix)
		if ok {
			if _, ok := d.allVocabs[name]; ok {
				// standard vocabulary of this draft: record the short name.
				// NOTE(review): if the short name were already present we
				// fall through to the user-map check below with the full
				// URL — map keys are unique so this appears unreachable;
				// confirm before restructuring.
				if !slices.Contains(vocabs, name) {
					vocabs = append(vocabs, name)
					continue
				}
			}
		}
		// non-standard vocabulary: must be registered by the user
		if _, ok := vocabularies[vocab]; !ok {
			return nil, &UnsupportedVocabularyError{url.String(), vocab}
		}
		if !slices.Contains(vocabs, vocab) {
			vocabs = append(vocabs, vocab)
		}
	}
	// the core vocabulary is always in effect
	if !slices.Contains(vocabs, "core") {
		vocabs = append(vocabs, "core")
	}
	return vocabs, nil
}
|
||||
|
||||
// --
|
||||
|
||||
// dialect pairs a draft with the set of vocabularies selected by a
// meta-schema's "$vocabulary" keyword.
type dialect struct {
	draft  *Draft
	vocabs []string // nil means use draft.defaultVocabs
}
|
||||
|
||||
func (d *dialect) hasVocab(name string) bool {
|
||||
if name == "core" || d.draft.version < 2019 {
|
||||
return true
|
||||
}
|
||||
if d.vocabs != nil {
|
||||
return slices.Contains(d.vocabs, name)
|
||||
}
|
||||
return slices.Contains(d.draft.defaultVocabs, name)
|
||||
}
|
||||
|
||||
func (d *dialect) activeVocabs(assertVocabs bool, vocabularies map[string]*Vocabulary) []string {
|
||||
if len(vocabularies) == 0 {
|
||||
return d.vocabs
|
||||
}
|
||||
if d.draft.version < 2019 {
|
||||
assertVocabs = true
|
||||
}
|
||||
if !assertVocabs {
|
||||
return d.vocabs
|
||||
}
|
||||
var vocabs []string
|
||||
if d.vocabs == nil {
|
||||
vocabs = slices.Clone(d.draft.defaultVocabs)
|
||||
} else {
|
||||
vocabs = slices.Clone(d.vocabs)
|
||||
}
|
||||
for vocab := range vocabularies {
|
||||
if !slices.Contains(vocabs, vocab) {
|
||||
vocabs = append(vocabs, vocab)
|
||||
}
|
||||
}
|
||||
return vocabs
|
||||
}
|
||||
|
||||
// getSchema returns the meta-schema for validating schemas written in
// this dialect. When no extra vocabularies are active it is the draft's
// own meta-schema; otherwise a synthetic in-memory schema is built whose
// allOf combines every active vocabulary schema.
func (d *dialect) getSchema(assertVocabs bool, vocabularies map[string]*Vocabulary) *Schema {
	vocabs := d.activeVocabs(assertVocabs, vocabularies)
	if vocabs == nil {
		return d.draft.sch
	}

	var allOf []*Schema
	for _, vocab := range vocabs {
		sch := d.draft.allVocabs[vocab]
		if sch == nil {
			// not a standard vocabulary of this draft:
			// fall back to the user-registered schema, if any
			if v, ok := vocabularies[vocab]; ok {
				sch = v.Schema
			}
		}
		if sch != nil {
			allOf = append(allOf, sch)
		}
	}
	// always validate against core; fall back to the full meta-schema
	// if the draft has no separate core vocabulary schema
	if !slices.Contains(vocabs, "core") {
		sch := d.draft.allVocabs["core"]
		if sch == nil {
			sch = d.draft.sch
		}
		allOf = append(allOf, sch)
	}
	// synthetic resource hosting the combined allOf
	sch := &Schema{
		Location:     "urn:mem:metaschema",
		up:           urlPtr{url("urn:mem:metaschema"), ""},
		DraftVersion: d.draft.version,
		AllOf:        allOf,
	}
	sch.resource = sch
	if sch.DraftVersion >= 2020 {
		// 2020-12 meta-schemas use "$dynamicRef": "meta"; make the
		// synthetic schema its own dynamic-anchor target
		sch.DynamicAnchor = "meta"
		sch.dynamicAnchors = map[string]*Schema{
			"meta": sch,
		}
	}
	return sch
}
|
||||
|
||||
// --
|
||||
|
||||
// ParseIDError is returned when a schema id cannot be parsed.
type ParseIDError struct {
	URL string // location of the schema carrying the bad id
}

// Error implements the error interface.
func (e *ParseIDError) Error() string {
	return fmt.Sprintf("error in parsing id at %q", e.URL)
}
|
||||
|
||||
// --
|
||||
|
||||
// ParseAnchorError is returned when a schema anchor cannot be parsed.
type ParseAnchorError struct {
	URL string // location of the schema carrying the bad anchor
}

// Error implements the error interface.
func (e *ParseAnchorError) Error() string {
	return fmt.Sprintf("error in parsing anchor at %q", e.URL)
}
|
||||
|
||||
// --
|
||||
|
||||
// DuplicateIDError is returned when the same id is declared at two
// different locations within one document.
type DuplicateIDError struct {
	ID   string // the duplicated id
	URL  string // document containing the duplicates
	Ptr1 string // json-pointer of the first occurrence
	Ptr2 string // json-pointer of the second occurrence
}

// Error implements the error interface.
func (e *DuplicateIDError) Error() string {
	return fmt.Sprintf("duplicate id %q in %q at %q and %q", e.ID, e.URL, e.Ptr1, e.Ptr2)
}
|
||||
|
||||
// --
|
||||
|
||||
// DuplicateAnchorError is returned when the same anchor is declared at
// two different locations within one document.
type DuplicateAnchorError struct {
	Anchor string // the duplicated anchor
	URL    string // document containing the duplicates
	Ptr1   string // json-pointer of the first occurrence
	Ptr2   string // json-pointer of the second occurrence
}

// Error implements the error interface.
func (e *DuplicateAnchorError) Error() string {
	return fmt.Sprintf("duplicate anchor %q in %q at %q and %q", e.Anchor, e.URL, e.Ptr1, e.Ptr2)
}
|
||||
|
||||
// --
|
||||
|
||||
func joinSubschemas(a1 []SchemaPath, a2 ...SchemaPath) []SchemaPath {
|
||||
var a []SchemaPath
|
||||
a = append(a, a1...)
|
||||
a = append(a, a2...)
|
||||
return a
|
||||
}
|
||||
708
vendor/github.com/santhosh-tekuri/jsonschema/v6/format.go
generated
vendored
Normal file
708
vendor/github.com/santhosh-tekuri/jsonschema/v6/format.go
generated
vendored
Normal file
|
|
@ -0,0 +1,708 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"net/netip"
|
||||
gourl "net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Format defines a specific format.
type Format struct {
	// Name of format.
	Name string

	// Validate checks if given value is of this format.
	// The built-in validators return nil for non-string values:
	// format checks apply only to strings.
	Validate func(v any) error
}
|
||||
|
||||
// formats maps a format name to its built-in implementation.
// Note that "iri" and "iri-reference" reuse the uri validators.
var formats = map[string]*Format{
	"json-pointer":          {"json-pointer", validateJSONPointer},
	"relative-json-pointer": {"relative-json-pointer", validateRelativeJSONPointer},
	"uuid":                  {"uuid", validateUUID},
	"duration":              {"duration", validateDuration},
	"period":                {"period", validatePeriod},
	"ipv4":                  {"ipv4", validateIPV4},
	"ipv6":                  {"ipv6", validateIPV6},
	"hostname":              {"hostname", validateHostname},
	"email":                 {"email", validateEmail},
	"date":                  {"date", validateDate},
	"time":                  {"time", validateTime},
	"date-time":             {"date-time", validateDateTime},
	"uri":                   {"uri", validateURI},
	"iri":                   {"iri", validateURI},
	"uri-reference":         {"uri-reference", validateURIReference},
	"iri-reference":         {"iri-reference", validateURIReference},
	"uri-template":          {"uri-template", validateURITemplate},
	"semver":                {"semver", validateSemver},
}
|
||||
|
||||
// see https://www.rfc-editor.org/rfc/rfc6901#section-3
|
||||
func validateJSONPointer(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if s == "" {
|
||||
return nil
|
||||
}
|
||||
if !strings.HasPrefix(s, "/") {
|
||||
return LocalizableError("not starting with /")
|
||||
}
|
||||
for _, tok := range strings.Split(s, "/")[1:] {
|
||||
escape := false
|
||||
for _, ch := range tok {
|
||||
if escape {
|
||||
escape = false
|
||||
if ch != '0' && ch != '1' {
|
||||
return LocalizableError("~ must be followed by 0 or 1")
|
||||
}
|
||||
continue
|
||||
}
|
||||
if ch == '~' {
|
||||
escape = true
|
||||
continue
|
||||
}
|
||||
switch {
|
||||
case ch >= '\x00' && ch <= '\x2E':
|
||||
case ch >= '\x30' && ch <= '\x7D':
|
||||
case ch >= '\x7F' && ch <= '\U0010FFFF':
|
||||
default:
|
||||
return LocalizableError("invalid character %q", ch)
|
||||
}
|
||||
}
|
||||
if escape {
|
||||
return LocalizableError("~ must be followed by 0 or 1")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
|
||||
func validateRelativeJSONPointer(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// start with non-negative-integer
|
||||
numDigits := 0
|
||||
for _, ch := range s {
|
||||
if ch >= '0' && ch <= '9' {
|
||||
numDigits++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if numDigits == 0 {
|
||||
return LocalizableError("must start with non-negative integer")
|
||||
}
|
||||
if numDigits > 1 && strings.HasPrefix(s, "0") {
|
||||
return LocalizableError("starts with zero")
|
||||
}
|
||||
s = s[numDigits:]
|
||||
|
||||
// followed by either json-pointer or '#'
|
||||
if s == "#" {
|
||||
return nil
|
||||
}
|
||||
return validateJSONPointer(s)
|
||||
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc4122#page-4
|
||||
func validateUUID(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
hexGroups := []int{8, 4, 4, 4, 12}
|
||||
groups := strings.Split(s, "-")
|
||||
if len(groups) != len(hexGroups) {
|
||||
return LocalizableError("must have %d elements", len(hexGroups))
|
||||
}
|
||||
for i, group := range groups {
|
||||
if len(group) != hexGroups[i] {
|
||||
return LocalizableError("element %d must be %d characters long", i+1, hexGroups[i])
|
||||
}
|
||||
for _, ch := range group {
|
||||
switch {
|
||||
case ch >= '0' && ch <= '9':
|
||||
case ch >= 'a' && ch <= 'f':
|
||||
case ch >= 'A' && ch <= 'F':
|
||||
default:
|
||||
return LocalizableError("non-hex character %q", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A
// validateDuration checks an ISO 8601 duration: "P" followed by either
// a week form ("nW", exclusive of all other units) or by date units
// (Y, M, D) and, after an optional "T", time units (H, M, S). Each unit
// may appear at most once and must be in order.
func validateDuration(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}

	// must start with 'P'
	s, ok = strings.CutPrefix(s, "P")
	if !ok {
		return LocalizableError("must start with P")
	}
	if s == "" {
		return LocalizableError("nothing after P")
	}

	// dur-week: "nW" stands alone and cannot mix with other units
	if s, ok := strings.CutSuffix(s, "W"); ok {
		if s == "" {
			return LocalizableError("no number in week")
		}
		for _, ch := range s {
			if ch < '0' || ch > '9' {
				return LocalizableError("invalid week")
			}
		}
		return nil
	}

	// date part uses Y/M/D; the part after "T" uses H/M/S
	allUnits := []string{"YMD", "HMS"}
	for i, s := range strings.Split(s, "T") {
		if i != 0 && s == "" {
			return LocalizableError("no time elements")
		}
		if i >= len(allUnits) {
			return LocalizableError("more than one T")
		}
		units := allUnits[i]
		// repeatedly consume "<digits><unit>"; trimming `units` as we
		// go enforces both ordering and at-most-once per unit
		for s != "" {
			digitCount := 0
			for _, ch := range s {
				if ch >= '0' && ch <= '9' {
					digitCount++
				} else {
					break
				}
			}
			if digitCount == 0 {
				return LocalizableError("missing number")
			}
			s = s[digitCount:]
			if s == "" {
				return LocalizableError("missing unit")
			}
			unit := s[0]
			j := strings.IndexByte(units, unit)
			if j == -1 {
				// distinguish "valid unit, wrong position" from garbage
				if strings.IndexByte(allUnits[i], unit) != -1 {
					return LocalizableError("unit %q out of order", unit)
				}
				return LocalizableError("invalid unit %q", unit)
			}
			units = units[j+1:]
			s = s[1:]
		}
	}

	return nil
}
|
||||
|
||||
func validateIPV4(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
groups := strings.Split(s, ".")
|
||||
if len(groups) != 4 {
|
||||
return LocalizableError("expected four decimals")
|
||||
}
|
||||
for _, group := range groups {
|
||||
if len(group) > 1 && group[0] == '0' {
|
||||
return LocalizableError("leading zeros")
|
||||
}
|
||||
n, err := strconv.Atoi(group)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if n < 0 || n > 255 {
|
||||
return LocalizableError("decimal must be between 0 and 255")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateIPV6(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if !strings.Contains(s, ":") {
|
||||
return LocalizableError("missing colon")
|
||||
}
|
||||
addr, err := netip.ParseAddr(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if addr.Zone() != "" {
|
||||
return LocalizableError("zone id is not a part of ipv6 address")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
|
||||
func validateHostname(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
|
||||
s = strings.TrimSuffix(s, ".")
|
||||
if len(s) > 253 {
|
||||
return LocalizableError("more than 253 characters long")
|
||||
}
|
||||
|
||||
// Hostnames are composed of series of labels concatenated with dots, as are all domain names
|
||||
for _, label := range strings.Split(s, ".") {
|
||||
// Each label must be from 1 to 63 characters long
|
||||
if len(label) < 1 || len(label) > 63 {
|
||||
return LocalizableError("label must be 1 to 63 characters long")
|
||||
}
|
||||
|
||||
// labels must not start or end with a hyphen
|
||||
if strings.HasPrefix(label, "-") {
|
||||
return LocalizableError("label starts with hyphen")
|
||||
}
|
||||
if strings.HasSuffix(label, "-") {
|
||||
return LocalizableError("label ends with hyphen")
|
||||
}
|
||||
|
||||
// labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
|
||||
// the digits '0' through '9', and the hyphen ('-')
|
||||
for _, ch := range label {
|
||||
switch {
|
||||
case ch >= 'a' && ch <= 'z':
|
||||
case ch >= 'A' && ch <= 'Z':
|
||||
case ch >= '0' && ch <= '9':
|
||||
case ch == '-':
|
||||
default:
|
||||
return LocalizableError("invalid character %q", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://en.wikipedia.org/wiki/Email_address
|
||||
func validateEmail(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
// entire email address to be no more than 254 characters long
|
||||
if len(s) > 254 {
|
||||
return LocalizableError("more than 255 characters long")
|
||||
}
|
||||
|
||||
// email address is generally recognized as having two parts joined with an at-sign
|
||||
at := strings.LastIndexByte(s, '@')
|
||||
if at == -1 {
|
||||
return LocalizableError("missing @")
|
||||
}
|
||||
local, domain := s[:at], s[at+1:]
|
||||
|
||||
// local part may be up to 64 characters long
|
||||
if len(local) > 64 {
|
||||
return LocalizableError("local part more than 64 characters long")
|
||||
}
|
||||
|
||||
if len(local) > 1 && strings.HasPrefix(local, `"`) && strings.HasPrefix(local, `"`) {
|
||||
// quoted
|
||||
local := local[1 : len(local)-1]
|
||||
if strings.IndexByte(local, '\\') != -1 || strings.IndexByte(local, '"') != -1 {
|
||||
return LocalizableError("backslash and quote are not allowed within quoted local part")
|
||||
}
|
||||
} else {
|
||||
// unquoted
|
||||
if strings.HasPrefix(local, ".") {
|
||||
return LocalizableError("starts with dot")
|
||||
}
|
||||
if strings.HasSuffix(local, ".") {
|
||||
return LocalizableError("ends with dot")
|
||||
}
|
||||
|
||||
// consecutive dots not allowed
|
||||
if strings.Contains(local, "..") {
|
||||
return LocalizableError("consecutive dots")
|
||||
}
|
||||
|
||||
// check allowed chars
|
||||
for _, ch := range local {
|
||||
switch {
|
||||
case ch >= 'a' && ch <= 'z':
|
||||
case ch >= 'A' && ch <= 'Z':
|
||||
case ch >= '0' && ch <= '9':
|
||||
case strings.ContainsRune(".!#$%&'*+-/=?^_`{|}~", ch):
|
||||
default:
|
||||
return LocalizableError("invalid character %q", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// domain if enclosed in brackets, must match an IP address
|
||||
if strings.HasPrefix(domain, "[") && strings.HasSuffix(domain, "]") {
|
||||
domain = domain[1 : len(domain)-1]
|
||||
if rem, ok := strings.CutPrefix(domain, "IPv6:"); ok {
|
||||
if err := validateIPV6(rem); err != nil {
|
||||
return LocalizableError("invalid ipv6 address: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := validateIPV4(domain); err != nil {
|
||||
return LocalizableError("invalid ipv4 address: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// domain must match the requirements for a hostname
|
||||
if err := validateHostname(domain); err != nil {
|
||||
return LocalizableError("invalid domain: %v", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
// validateDate checks for an RFC 3339 full-date of the form yyyy-mm-dd,
// delegating calendar correctness (month lengths, leap years) to the
// time package.
func validateDate(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}
	if _, err := time.Parse("2006-01-02", s); err != nil {
		return err
	}
	return nil
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
// NOTE: golang time package does not support leap seconds.
// validateTime checks an RFC 3339 full-time: hh:mm:ss, optional
// fractional seconds, then "Z"/"z" or a ±hh:mm numeric offset.
// A leap second (ss == 60) is accepted only when the time is
// 23:59 after conversion to UTC.
func validateTime(v any) error {
	str, ok := v.(string)
	if !ok {
		return nil
	}

	// min: hh:mm:ssZ
	if len(str) < 9 {
		return LocalizableError("less than 9 characters long")
	}
	if str[2] != ':' || str[5] != ':' {
		return LocalizableError("missing colon in correct place")
	}

	// parse hh:mm:ss
	var hms []int
	for _, tok := range strings.SplitN(str[:8], ":", 3) {
		i, err := strconv.Atoi(tok)
		if err != nil {
			return LocalizableError("invalid hour/min/sec")
		}
		if i < 0 {
			return LocalizableError("non-positive hour/min/sec")
		}
		hms = append(hms, i)
	}
	if len(hms) != 3 {
		return LocalizableError("missing hour/min/sec")
	}
	h, m, s := hms[0], hms[1], hms[2]
	// sec may be 60 to allow a leap second (validated at the end)
	if h > 23 || m > 59 || s > 60 {
		return LocalizableError("hour/min/sec out of range")
	}
	str = str[8:]

	// parse sec-frac if present
	if rem, ok := strings.CutPrefix(str, "."); ok {
		numDigits := 0
		for _, ch := range rem {
			if ch >= '0' && ch <= '9' {
				numDigits++
			} else {
				break
			}
		}
		if numDigits == 0 {
			return LocalizableError("no digits in second fraction")
		}
		str = rem[numDigits:]
	}

	if str != "z" && str != "Z" {
		// parse time-numoffset (±hh:mm)
		if len(str) != 6 {
			return LocalizableError("offset must be 6 characters long")
		}
		var sign int
		// sign is inverted because the offset is subtracted below
		// to convert the local time to UTC
		switch str[0] {
		case '+':
			sign = -1
		case '-':
			sign = +1
		default:
			return LocalizableError("offset must begin with plus/minus")
		}
		str = str[1:]
		if str[2] != ':' {
			return LocalizableError("missing colon in offset in correct place")
		}

		// parse the offset's hh and mm (SplitN yields exactly two
		// tokens since the colon position was just verified)
		var zhm []int
		for _, tok := range strings.SplitN(str, ":", 2) {
			i, err := strconv.Atoi(tok)
			if err != nil {
				return LocalizableError("invalid hour/min in offset")
			}
			if i < 0 {
				return LocalizableError("non-positive hour/min in offset")
			}
			zhm = append(zhm, i)
		}
		zh, zm := zhm[0], zhm[1]
		if zh > 23 || zm > 59 {
			return LocalizableError("hour/min in offset out of range")
		}

		// apply timezone: convert to UTC (mod 24h) for the leap-second check
		hm := (h*60 + m) + sign*(zh*60+zm)
		if hm < 0 {
			hm += 24 * 60
		}
		h, m = hm/60, hm%60
	}

	// check leap second: only 23:59:60 UTC is valid
	if s >= 60 && (h != 23 || m != 59) {
		return LocalizableError("invalid leap second")
	}

	return nil
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
|
||||
func validateDateTime(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// min: yyyy-mm-ddThh:mm:ssZ
|
||||
if len(s) < 20 {
|
||||
return LocalizableError("less than 20 characters long")
|
||||
}
|
||||
|
||||
if s[10] != 't' && s[10] != 'T' {
|
||||
return LocalizableError("11th character must be t or T")
|
||||
}
|
||||
if err := validateDate(s[:10]); err != nil {
|
||||
return LocalizableError("invalid date element: %v", err)
|
||||
}
|
||||
if err := validateTime(s[11:]); err != nil {
|
||||
return LocalizableError("invalid time element: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseURL(s string) (*gourl.URL, error) {
|
||||
u, err := gourl.Parse(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// gourl does not validate ipv6 host address
|
||||
hostName := u.Hostname()
|
||||
if strings.Contains(hostName, ":") {
|
||||
if !strings.Contains(u.Host, "[") || !strings.Contains(u.Host, "]") {
|
||||
return nil, LocalizableError("ipv6 address not enclosed in brackets")
|
||||
}
|
||||
if err := validateIPV6(hostName); err != nil {
|
||||
return nil, LocalizableError("invalid ipv6 address: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return u, nil
|
||||
}
|
||||
|
||||
func validateURI(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
u, err := parseURL(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !u.IsAbs() {
|
||||
return LocalizableError("relative url")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateURIReference(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if strings.Contains(s, `\`) {
|
||||
return LocalizableError(`contains \`)
|
||||
}
|
||||
_, err := parseURL(s)
|
||||
return err
|
||||
}
|
||||
|
||||
// validateURITemplate checks that the value parses as a URL and that
// each path segment contains only balanced, non-nested curly braces
// (URI-template expressions).
// NOTE(review): segments come from u.RawPath, which net/url leaves
// empty unless the escaped and decoded paths differ — confirm this
// matches the intended coverage.
func validateURITemplate(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}
	u, err := parseURL(s)
	if err != nil {
		return err
	}
	for _, tok := range strings.Split(u.RawPath, "/") {
		tok, err = decode(tok)
		if err != nil {
			return LocalizableError("percent decode failed: %v", err)
		}
		// scan for braces: they must strictly alternate '{' then '}'
		want := true
		for _, ch := range tok {
			var got bool
			switch ch {
			case '{':
				got = true
			case '}':
				got = false
			default:
				continue
			}
			if got != want {
				return LocalizableError("nested curly braces")
			}
			want = !want
		}
		// ended inside an open expression
		if !want {
			return LocalizableError("no matching closing brace")
		}
	}
	return nil
}
|
||||
|
||||
func validatePeriod(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
slash := strings.IndexByte(s, '/')
|
||||
if slash == -1 {
|
||||
return LocalizableError("missing slash")
|
||||
}
|
||||
|
||||
start, end := s[:slash], s[slash+1:]
|
||||
if strings.HasPrefix(start, "P") {
|
||||
if err := validateDuration(start); err != nil {
|
||||
return LocalizableError("invalid start duration: %v", err)
|
||||
}
|
||||
if err := validateDateTime(end); err != nil {
|
||||
return LocalizableError("invalid end date-time: %v", err)
|
||||
}
|
||||
} else {
|
||||
if err := validateDateTime(start); err != nil {
|
||||
return LocalizableError("invalid start date-time: %v", err)
|
||||
}
|
||||
if strings.HasPrefix(end, "P") {
|
||||
if err := validateDuration(end); err != nil {
|
||||
return LocalizableError("invalid end duration: %v", err)
|
||||
}
|
||||
} else if err := validateDateTime(end); err != nil {
|
||||
return LocalizableError("invalid end date-time: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://semver.org/#backusnaur-form-grammar-for-valid-semver-versions
// validateSemver checks a semantic version: major.minor.patch with an
// optional "-" pre-release suffix and an optional "+" build suffix.
// The suffixes are stripped from the right before the core is checked.
func validateSemver(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}

	// build -- everything after the first '+': dot-separated,
	// non-empty alphanumeric/hyphen identifiers
	if i := strings.IndexByte(s, '+'); i != -1 {
		build := s[i+1:]
		if build == "" {
			return LocalizableError("build is empty")
		}
		for _, buildID := range strings.Split(build, ".") {
			if buildID == "" {
				return LocalizableError("build identifier is empty")
			}
			for _, ch := range buildID {
				switch {
				case ch >= '0' && ch <= '9':
				case (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '-':
				default:
					return LocalizableError("invalid character %q in build identifier", ch)
				}
			}
		}
		s = s[:i]
	}

	// pre-release -- everything after the first '-': like build
	// identifiers, except purely numeric ones must not have a
	// leading zero
	if i := strings.IndexByte(s, '-'); i != -1 {
		preRelease := s[i+1:]
		for _, preReleaseID := range strings.Split(preRelease, ".") {
			if preReleaseID == "" {
				return LocalizableError("pre-release identifier is empty")
			}
			allDigits := true
			for _, ch := range preReleaseID {
				switch {
				case ch >= '0' && ch <= '9':
				case (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '-':
					allDigits = false
				default:
					return LocalizableError("invalid character %q in pre-release identifier", ch)
				}
			}
			if allDigits && len(preReleaseID) > 1 && preReleaseID[0] == '0' {
				return LocalizableError("pre-release numeric identifier starts with zero")
			}
		}
		s = s[:i]
	}

	// versionCore -- major.minor.patch: three digits-only numbers,
	// none with a leading zero
	versions := strings.Split(s, ".")
	if len(versions) != 3 {
		return LocalizableError("versionCore must have 3 numbers separated by dot")
	}
	names := []string{"major", "minor", "patch"}
	for i, version := range versions {
		if version == "" {
			return LocalizableError("%s is empty", names[i])
		}
		if len(version) > 1 && version[0] == '0' {
			return LocalizableError("%s starts with zero", names[i])
		}
		for _, ch := range version {
			if ch < '0' || ch > '9' {
				return LocalizableError("%s contains non-digit", names[i])
			}
		}
	}

	return nil
}
|
||||
8
vendor/github.com/santhosh-tekuri/jsonschema/v6/go.work
generated
vendored
Normal file
8
vendor/github.com/santhosh-tekuri/jsonschema/v6/go.work
generated
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
go 1.21.1
|
||||
|
||||
use (
|
||||
.
|
||||
./cmd/jv
|
||||
)
|
||||
|
||||
replace github.com/santhosh-tekuri/jsonschema/v6 v6.0.0 => ./
|
||||
651
vendor/github.com/santhosh-tekuri/jsonschema/v6/kind/kind.go
generated
vendored
Normal file
651
vendor/github.com/santhosh-tekuri/jsonschema/v6/kind/kind.go
generated
vendored
Normal file
|
|
@ -0,0 +1,651 @@
|
|||
package kind
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/big"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
// --
|
||||
|
||||
// InvalidJsonValue is the error kind reported when the instance
// contains a value that is not a valid json type.
type InvalidJsonValue struct {
	Value any // the offending value
}

// KeywordPath implements jsonschema.ErrorKind.
func (*InvalidJsonValue) KeywordPath() []string {
	return nil
}

// LocalizedString implements jsonschema.ErrorKind.
func (k *InvalidJsonValue) LocalizedString(p *message.Printer) string {
	return p.Sprintf("invalid jsonType %T", k.Value)
}
|
||||
|
||||
// --
|
||||
|
||||
// Schema is the error kind carrying the location of the schema whose
// validation failed.
type Schema struct {
	Location string // absolute location of the failed schema
}

// KeywordPath implements jsonschema.ErrorKind.
func (*Schema) KeywordPath() []string {
	return nil
}

// LocalizedString implements jsonschema.ErrorKind.
func (k *Schema) LocalizedString(p *message.Printer) string {
	return p.Sprintf("jsonschema validation failed with %s", quote(k.Location))
}
|
||||
|
||||
// --
|
||||
|
||||
// Group is an error kind carrying no detail of its own; it localizes
// to a generic "validation failed" message.
type Group struct{}

// KeywordPath implements jsonschema.ErrorKind.
func (*Group) KeywordPath() []string {
	return nil
}

// LocalizedString implements jsonschema.ErrorKind.
func (*Group) LocalizedString(p *message.Printer) string {
	return p.Sprintf("validation failed")
}
|
||||
|
||||
// --
|
||||
|
||||
// Not is the error kind for a failed "not" keyword.
type Not struct{}

// KeywordPath implements jsonschema.ErrorKind.
func (*Not) KeywordPath() []string {
	return nil
}

// LocalizedString implements jsonschema.ErrorKind.
func (*Not) LocalizedString(p *message.Printer) string {
	return p.Sprintf("not failed")
}
|
||||
|
||||
// --
|
||||
|
||||
// AllOf is the error kind for a failed "allOf" keyword.
type AllOf struct{}

// KeywordPath implements jsonschema.ErrorKind.
func (*AllOf) KeywordPath() []string {
	return []string{"allOf"}
}

// LocalizedString implements jsonschema.ErrorKind.
func (*AllOf) LocalizedString(p *message.Printer) string {
	return p.Sprintf("allOf failed")
}
|
||||
|
||||
// --
|
||||
|
||||
// AnyOf is the error kind for a failed "anyOf" keyword.
type AnyOf struct{}

// KeywordPath implements jsonschema.ErrorKind.
func (*AnyOf) KeywordPath() []string {
	return []string{"anyOf"}
}

// LocalizedString implements jsonschema.ErrorKind.
func (*AnyOf) LocalizedString(p *message.Printer) string {
	return p.Sprintf("anyOf failed")
}
|
||||
|
||||
// --
|
||||
|
||||
// OneOf is the error kind for a failed "oneOf" keyword.
type OneOf struct {
	// Subschemas gives indexes of Subschemas that have matched.
	// Value nil, means none of the subschemas matched.
	Subschemas []int
}

// KeywordPath implements jsonschema.ErrorKind.
func (*OneOf) KeywordPath() []string {
	return []string{"oneOf"}
}

// LocalizedString implements jsonschema.ErrorKind.
// NOTE(review): assumes a non-empty Subschemas always holds at least
// two entries (oneOf can only fail with zero or multiple matches);
// Subschemas[1] panics otherwise — confirm the producer guarantees this.
func (k *OneOf) LocalizedString(p *message.Printer) string {
	if len(k.Subschemas) == 0 {
		return p.Sprintf("oneOf failed, none matched")
	}
	return p.Sprintf("oneOf failed, subschemas %d, %d matched", k.Subschemas[0], k.Subschemas[1])
}
|
||||
|
||||
//--
|
||||
|
||||
// FalseSchema is the error kind reported when the instance was
// validated against the boolean schema false.
type FalseSchema struct{}

// KeywordPath implements jsonschema.ErrorKind.
func (*FalseSchema) KeywordPath() []string {
	return nil
}

// LocalizedString implements jsonschema.ErrorKind.
func (*FalseSchema) LocalizedString(p *message.Printer) string {
	return p.Sprintf("false schema")
}
|
||||
|
||||
// --
|
||||
|
||||
// RefCycle is the error kind reported when two keyword locations
// resolve to the same URL, causing a reference cycle.
type RefCycle struct {
	URL              string // the url both locations resolve to
	KeywordLocation1 string
	KeywordLocation2 string
}

// KeywordPath implements jsonschema.ErrorKind.
func (*RefCycle) KeywordPath() []string {
	return nil
}

// LocalizedString implements jsonschema.ErrorKind.
func (k *RefCycle) LocalizedString(p *message.Printer) string {
	return p.Sprintf("both %s and %s resolve to %q causing reference cycle", k.KeywordLocation1, k.KeywordLocation2, k.URL)
}
|
||||
|
||||
// --
|
||||
|
||||
// Type is the error kind for a failed "type" keyword.
type Type struct {
	Got  string   // json type of the instance
	Want []string // json types permitted by the schema
}

// KeywordPath implements jsonschema.ErrorKind.
func (*Type) KeywordPath() []string {
	return []string{"type"}
}

// LocalizedString implements jsonschema.ErrorKind.
func (k *Type) LocalizedString(p *message.Printer) string {
	want := strings.Join(k.Want, " or ")
	return p.Sprintf("got %s, want %s", k.Got, want)
}
|
||||
|
||||
// --
|
||||
|
||||
type Enum struct {
|
||||
Got any
|
||||
Want []any
|
||||
}
|
||||
|
||||
// KeywordPath implements jsonschema.ErrorKind.
|
||||
func (*Enum) KeywordPath() []string {
|
||||
return []string{"enum"}
|
||||
}
|
||||
|
||||
func (k *Enum) LocalizedString(p *message.Printer) string {
|
||||
allPrimitive := true
|
||||
loop:
|
||||
for _, item := range k.Want {
|
||||
switch item.(type) {
|
||||
case []any, map[string]any:
|
||||
allPrimitive = false
|
||||
break loop
|
||||
}
|
||||
}
|
||||
if allPrimitive {
|
||||
if len(k.Want) == 1 {
|
||||
return p.Sprintf("value must be %s", display(k.Want[0]))
|
||||
}
|
||||
var want []string
|
||||
for _, v := range k.Want {
|
||||
want = append(want, display(v))
|
||||
}
|
||||
return p.Sprintf("value must be one of %s", strings.Join(want, ", "))
|
||||
}
|
||||
return p.Sprintf("enum failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Const struct {
|
||||
Got any
|
||||
Want any
|
||||
}
|
||||
|
||||
func (*Const) KeywordPath() []string {
|
||||
return []string{"const"}
|
||||
}
|
||||
|
||||
func (k *Const) LocalizedString(p *message.Printer) string {
|
||||
switch want := k.Want.(type) {
|
||||
case []any, map[string]any:
|
||||
return p.Sprintf("const failed")
|
||||
default:
|
||||
return p.Sprintf("value must be %s", display(want))
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// Format is the error kind for a failed "format" keyword.
type Format struct {
	Got  any    // instance value
	Want string // format name
	Err  error  // cause reported by the format validator
}

// KeywordPath implements jsonschema.ErrorKind.
func (*Format) KeywordPath() []string {
	return []string{"format"}
}

// LocalizedString implements jsonschema.ErrorKind.
func (k *Format) LocalizedString(p *message.Printer) string {
	return p.Sprintf("%s is not valid %s: %v", display(k.Got), k.Want, localizedError(k.Err, p))
}
|
||||
|
||||
// --
|
||||
|
||||
type Reference struct {
|
||||
Keyword string
|
||||
URL string
|
||||
}
|
||||
|
||||
func (k *Reference) KeywordPath() []string {
|
||||
return []string{k.Keyword}
|
||||
}
|
||||
|
||||
func (*Reference) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("validation failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinProperties struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinProperties) KeywordPath() []string {
|
||||
return []string{"minProperties"}
|
||||
}
|
||||
|
||||
func (k *MinProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minProperties: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxProperties struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxProperties) KeywordPath() []string {
|
||||
return []string{"maxProperties"}
|
||||
}
|
||||
|
||||
func (k *MaxProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxProperties: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinItems struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinItems) KeywordPath() []string {
|
||||
return []string{"minItems"}
|
||||
}
|
||||
|
||||
func (k *MinItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minItems: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxItems struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxItems) KeywordPath() []string {
|
||||
return []string{"maxItems"}
|
||||
}
|
||||
|
||||
func (k *MaxItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxItems: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AdditionalItems struct {
|
||||
Count int
|
||||
}
|
||||
|
||||
func (*AdditionalItems) KeywordPath() []string {
|
||||
return []string{"additionalItems"}
|
||||
}
|
||||
|
||||
func (k *AdditionalItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("last %d additionalItem(s) not allowed", k.Count)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Required struct {
|
||||
Missing []string
|
||||
}
|
||||
|
||||
func (*Required) KeywordPath() []string {
|
||||
return []string{"required"}
|
||||
}
|
||||
|
||||
func (k *Required) LocalizedString(p *message.Printer) string {
|
||||
if len(k.Missing) == 1 {
|
||||
return p.Sprintf("missing property %s", quote(k.Missing[0]))
|
||||
}
|
||||
return p.Sprintf("missing properties %s", joinQuoted(k.Missing, ", "))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Dependency struct {
|
||||
Prop string // dependency of prop that failed
|
||||
Missing []string // missing props
|
||||
}
|
||||
|
||||
func (k *Dependency) KeywordPath() []string {
|
||||
return []string{"dependency", k.Prop}
|
||||
}
|
||||
|
||||
func (k *Dependency) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("properties %s required, if %s exists", joinQuoted(k.Missing, ", "), quote(k.Prop))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type DependentRequired struct {
|
||||
Prop string // dependency of prop that failed
|
||||
Missing []string // missing props
|
||||
}
|
||||
|
||||
func (k *DependentRequired) KeywordPath() []string {
|
||||
return []string{"dependentRequired", k.Prop}
|
||||
}
|
||||
|
||||
func (k *DependentRequired) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("properties %s required, if %s exists", joinQuoted(k.Missing, ", "), quote(k.Prop))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AdditionalProperties struct {
|
||||
Properties []string
|
||||
}
|
||||
|
||||
func (*AdditionalProperties) KeywordPath() []string {
|
||||
return []string{"additionalProperties"}
|
||||
}
|
||||
|
||||
func (k *AdditionalProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("additional properties %s not allowed", joinQuoted(k.Properties, ", "))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type PropertyNames struct {
|
||||
Property string
|
||||
}
|
||||
|
||||
func (*PropertyNames) KeywordPath() []string {
|
||||
return []string{"propertyNames"}
|
||||
}
|
||||
|
||||
func (k *PropertyNames) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("invalid propertyName %s", quote(k.Property))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type UniqueItems struct {
|
||||
Duplicates [2]int
|
||||
}
|
||||
|
||||
func (*UniqueItems) KeywordPath() []string {
|
||||
return []string{"uniqueItems"}
|
||||
}
|
||||
|
||||
func (k *UniqueItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("items at %d and %d are equal", k.Duplicates[0], k.Duplicates[1])
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Contains struct{}
|
||||
|
||||
func (*Contains) KeywordPath() []string {
|
||||
return []string{"contains"}
|
||||
}
|
||||
|
||||
func (*Contains) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("no items match contains schema")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinContains struct {
|
||||
Got []int
|
||||
Want int
|
||||
}
|
||||
|
||||
func (*MinContains) KeywordPath() []string {
|
||||
return []string{"minContains"}
|
||||
}
|
||||
|
||||
func (k *MinContains) LocalizedString(p *message.Printer) string {
|
||||
if len(k.Got) == 0 {
|
||||
return p.Sprintf("min %d items required to match contains schema, but none matched", k.Want)
|
||||
} else {
|
||||
got := fmt.Sprintf("%v", k.Got)
|
||||
return p.Sprintf("min %d items required to match contains schema, but matched %d items at %v", k.Want, len(k.Got), got[1:len(got)-1])
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxContains struct {
|
||||
Got []int
|
||||
Want int
|
||||
}
|
||||
|
||||
func (*MaxContains) KeywordPath() []string {
|
||||
return []string{"maxContains"}
|
||||
}
|
||||
|
||||
func (k *MaxContains) LocalizedString(p *message.Printer) string {
|
||||
got := fmt.Sprintf("%v", k.Got)
|
||||
return p.Sprintf("max %d items required to match contains schema, but matched %d items at %v", k.Want, len(k.Got), got[1:len(got)-1])
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinLength struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinLength) KeywordPath() []string {
|
||||
return []string{"minLength"}
|
||||
}
|
||||
|
||||
func (k *MinLength) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minLength: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxLength struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxLength) KeywordPath() []string {
|
||||
return []string{"maxLength"}
|
||||
}
|
||||
|
||||
func (k *MaxLength) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxLength: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Pattern struct {
|
||||
Got string
|
||||
Want string
|
||||
}
|
||||
|
||||
func (*Pattern) KeywordPath() []string {
|
||||
return []string{"pattern"}
|
||||
}
|
||||
|
||||
func (k *Pattern) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("%s does not match pattern %s", quote(k.Got), quote(k.Want))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentEncoding struct {
|
||||
Want string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (*ContentEncoding) KeywordPath() []string {
|
||||
return []string{"contentEncoding"}
|
||||
}
|
||||
|
||||
func (k *ContentEncoding) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("value is not %s encoded: %v", quote(k.Want), localizedError(k.Err, p))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentMediaType struct {
|
||||
Got []byte
|
||||
Want string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (*ContentMediaType) KeywordPath() []string {
|
||||
return []string{"contentMediaType"}
|
||||
}
|
||||
|
||||
func (k *ContentMediaType) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("value if not of mediatype %s: %v", quote(k.Want), k.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentSchema struct{}
|
||||
|
||||
func (*ContentSchema) KeywordPath() []string {
|
||||
return []string{"contentSchema"}
|
||||
}
|
||||
|
||||
func (*ContentSchema) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("contentSchema failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Minimum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*Minimum) KeywordPath() []string {
|
||||
return []string{"minimum"}
|
||||
}
|
||||
|
||||
func (k *Minimum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("minimum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Maximum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*Maximum) KeywordPath() []string {
|
||||
return []string{"maximum"}
|
||||
}
|
||||
|
||||
func (k *Maximum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("maximum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ExclusiveMinimum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*ExclusiveMinimum) KeywordPath() []string {
|
||||
return []string{"exclusiveMinimum"}
|
||||
}
|
||||
|
||||
func (k *ExclusiveMinimum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("exclusiveMinimum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ExclusiveMaximum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*ExclusiveMaximum) KeywordPath() []string {
|
||||
return []string{"exclusiveMaximum"}
|
||||
}
|
||||
|
||||
func (k *ExclusiveMaximum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("exclusiveMaximum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MultipleOf struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*MultipleOf) KeywordPath() []string {
|
||||
return []string{"multipleOf"}
|
||||
}
|
||||
|
||||
func (k *MultipleOf) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("multipleOf: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// quote returns s wrapped in single quotes: embedded double quotes
// are left unescaped and embedded single quotes are escaped.
func quote(s string) string {
	quoted := fmt.Sprintf("%q", s)
	quoted = strings.ReplaceAll(quoted, `\"`, `"`)
	quoted = strings.ReplaceAll(quoted, `'`, `\'`)
	return "'" + quoted[1:len(quoted)-1] + "'"
}
|
||||
|
||||
func joinQuoted(arr []string, sep string) string {
|
||||
var sb strings.Builder
|
||||
for _, s := range arr {
|
||||
if sb.Len() > 0 {
|
||||
sb.WriteString(sep)
|
||||
}
|
||||
sb.WriteString(quote(s))
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// to be used only for primitive.
|
||||
func display(v any) string {
|
||||
switch v := v.(type) {
|
||||
case string:
|
||||
return quote(v)
|
||||
case []any, map[string]any:
|
||||
return "value"
|
||||
default:
|
||||
return fmt.Sprintf("%v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func localizedError(err error, p *message.Printer) string {
|
||||
if err, ok := err.(interface{ LocalizedError(*message.Printer) string }); ok {
|
||||
return err.LocalizedError(p)
|
||||
}
|
||||
return err.Error()
|
||||
}
|
||||
266
vendor/github.com/santhosh-tekuri/jsonschema/v6/loader.go
generated
vendored
Normal file
266
vendor/github.com/santhosh-tekuri/jsonschema/v6/loader.go
generated
vendored
Normal file
|
|
@ -0,0 +1,266 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
gourl "net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// URLLoader knows how to load json from given url.
//
// Implementations receive an absolute url and return the decoded
// json document or an error.
type URLLoader interface {
	// Load loads json from given absolute url.
	Load(url string) (any, error)
}
|
||||
|
||||
// --
|
||||
|
||||
// FileLoader loads json file url.
|
||||
type FileLoader struct{}
|
||||
|
||||
func (l FileLoader) Load(url string) (any, error) {
|
||||
path, err := l.ToFile(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
return UnmarshalJSON(f)
|
||||
}
|
||||
|
||||
// ToFile is helper method to convert file url to file path.
|
||||
func (l FileLoader) ToFile(url string) (string, error) {
|
||||
u, err := gourl.Parse(url)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if u.Scheme != "file" {
|
||||
return "", fmt.Errorf("invalid file url: %s", u)
|
||||
}
|
||||
path := u.Path
|
||||
if runtime.GOOS == "windows" {
|
||||
path = strings.TrimPrefix(path, "/")
|
||||
path = filepath.FromSlash(path)
|
||||
}
|
||||
return path, nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemeURLLoader delegates to other [URLLoaders]
|
||||
// based on url scheme.
|
||||
type SchemeURLLoader map[string]URLLoader
|
||||
|
||||
func (l SchemeURLLoader) Load(url string) (any, error) {
|
||||
u, err := gourl.Parse(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ll, ok := l[u.Scheme]
|
||||
if !ok {
|
||||
return nil, &UnsupportedURLSchemeError{u.String()}
|
||||
}
|
||||
return ll.Load(url)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
//go:embed metaschemas
|
||||
var metaFS embed.FS
|
||||
|
||||
func openMeta(url string) (fs.File, error) {
|
||||
u, meta := strings.CutPrefix(url, "http://json-schema.org/")
|
||||
if !meta {
|
||||
u, meta = strings.CutPrefix(url, "https://json-schema.org/")
|
||||
}
|
||||
if meta {
|
||||
if u == "schema" {
|
||||
return openMeta(draftLatest.url)
|
||||
}
|
||||
f, err := metaFS.Open("metaschemas/" + u)
|
||||
if err != nil {
|
||||
if errors.Is(err, fs.ErrNotExist) {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
return f, err
|
||||
}
|
||||
return nil, nil
|
||||
|
||||
}
|
||||
|
||||
func isMeta(url string) bool {
|
||||
f, err := openMeta(url)
|
||||
if err != nil {
|
||||
return true
|
||||
}
|
||||
if f != nil {
|
||||
f.Close()
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func loadMeta(url string) (any, error) {
|
||||
f, err := openMeta(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if f == nil {
|
||||
return nil, nil
|
||||
}
|
||||
defer f.Close()
|
||||
return UnmarshalJSON(f)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type defaultLoader struct {
|
||||
docs map[url]any // docs loaded so far
|
||||
loader URLLoader
|
||||
}
|
||||
|
||||
func (l *defaultLoader) add(url url, doc any) bool {
|
||||
if _, ok := l.docs[url]; ok {
|
||||
return false
|
||||
}
|
||||
l.docs[url] = doc
|
||||
return true
|
||||
}
|
||||
|
||||
func (l *defaultLoader) load(url url) (any, error) {
|
||||
if doc, ok := l.docs[url]; ok {
|
||||
return doc, nil
|
||||
}
|
||||
doc, err := loadMeta(url.String())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if doc != nil {
|
||||
l.add(url, doc)
|
||||
return doc, nil
|
||||
}
|
||||
if l.loader == nil {
|
||||
return nil, &LoadURLError{url.String(), errors.New("no URLLoader set")}
|
||||
}
|
||||
doc, err = l.loader.Load(url.String())
|
||||
if err != nil {
|
||||
return nil, &LoadURLError{URL: url.String(), Err: err}
|
||||
}
|
||||
l.add(url, doc)
|
||||
return doc, nil
|
||||
}
|
||||
|
||||
// getDraft resolves the draft for doc (located at up) by following its
// "$schema" chain, loading intermediate metaschemas as needed.
// defaultDraft is returned when doc declares no recognizable "$schema".
// cycle tracks the metaschema urls visited so far to detect loops.
func (l *defaultLoader) getDraft(up urlPtr, doc any, defaultDraft *Draft, cycle map[url]struct{}) (*Draft, error) {
	obj, ok := doc.(map[string]any)
	if !ok {
		// boolean schemas etc. carry no $schema
		return defaultDraft, nil
	}
	sch, ok := strVal(obj, "$schema")
	if !ok {
		return defaultDraft, nil
	}
	// fast path: $schema is one of the built-in draft urls
	if draft := draftFromURL(sch); draft != nil {
		return draft, nil
	}
	// custom metaschema: strip the fragment and follow the url
	sch, _ = split(sch)
	if _, err := gourl.Parse(sch); err != nil {
		return nil, &InvalidMetaSchemaURLError{up.String(), err}
	}
	schUrl := url(sch)
	// a root schema whose $schema points at itself cannot be resolved
	if up.ptr.isEmpty() && schUrl == up.url {
		return nil, &UnsupportedDraftError{schUrl.String()}
	}
	if _, ok := cycle[schUrl]; ok {
		return nil, &MetaSchemaCycleError{schUrl.String()}
	}
	cycle[schUrl] = struct{}{}
	doc, err := l.load(schUrl)
	if err != nil {
		return nil, err
	}
	// recurse into the custom metaschema to find the underlying draft
	return l.getDraft(urlPtr{schUrl, ""}, doc, defaultDraft, cycle)
}
|
||||
|
||||
// getMetaVocabs returns the vocabularies enabled by doc's custom
// metaschema, or nil when doc has no $schema or uses a built-in draft
// url (built-in drafts carry their own fixed vocabulary handling).
func (l *defaultLoader) getMetaVocabs(doc any, draft *Draft, vocabularies map[string]*Vocabulary) ([]string, error) {
	obj, ok := doc.(map[string]any)
	if !ok {
		return nil, nil
	}
	sch, ok := strVal(obj, "$schema")
	if !ok {
		return nil, nil
	}
	// deliberately shadows the draft parameter: a built-in draft url
	// means there is no custom metaschema to read vocabs from
	if draft := draftFromURL(sch); draft != nil {
		return nil, nil
	}
	// custom metaschema: strip the fragment and load it
	sch, _ = split(sch)
	if _, err := gourl.Parse(sch); err != nil {
		return nil, &ParseURLError{sch, err}
	}
	schUrl := url(sch)
	doc, err := l.load(schUrl)
	if err != nil {
		return nil, err
	}
	return draft.getVocabs(schUrl, doc, vocabularies)
}
|
||||
|
||||
// --
|
||||
|
||||
// LoadURLError is returned when the document at URL
// could not be loaded.
type LoadURLError struct {
	URL string
	Err error
}

func (e *LoadURLError) Error() string {
	// "failed" (was the ungrammatical "failing") loading
	return fmt.Sprintf("failed loading %q: %v", e.URL, e.Err)
}
|
||||
|
||||
// --
|
||||
|
||||
// UnsupportedURLSchemeError is returned when no URLLoader is
// registered for the url's scheme.
type UnsupportedURLSchemeError struct {
	url string
}

func (e *UnsupportedURLSchemeError) Error() string {
	return fmt.Sprintf("no URLLoader registered for %q", e.url)
}
|
||||
|
||||
// --
|
||||
|
||||
// ResourceExistsError is returned when a resource is added
// for a url that already has one.
type ResourceExistsError struct {
	url string
}

func (e *ResourceExistsError) Error() string {
	return fmt.Sprintf("resource for %q already exists", e.url)
}
|
||||
|
||||
// --
|
||||
|
||||
// UnmarshalJSON unmarshals into [any] without losing
// number precision using [json.Number].
func UnmarshalJSON(r io.Reader) (any, error) {
	dec := json.NewDecoder(r)
	dec.UseNumber()
	var doc any
	if err := dec.Decode(&doc); err != nil {
		return nil, err
	}
	// reject any content after the first top-level value
	// (equivalent to: err == nil || err != io.EOF)
	if _, err := dec.Token(); err != io.EOF {
		return nil, fmt.Errorf("invalid character after top-level value")
	}
	return doc, nil
}
|
||||
151
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-04/schema
generated
vendored
Normal file
151
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-04/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"description": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"positiveInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"positiveIntegerDefault0": {
|
||||
"allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uriref"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": {},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"exclusiveMinimum": true
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": {
|
||||
"anyOf": [
|
||||
{ "type": "boolean" },
|
||||
{ "$ref": "#" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxProperties": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "type": "boolean" },
|
||||
{ "$ref": "#" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" },
|
||||
"format": { "type": "string" },
|
||||
"$ref": { "type": "string" }
|
||||
},
|
||||
"dependencies": {
|
||||
"exclusiveMaximum": [ "maximum" ],
|
||||
"exclusiveMinimum": [ "minimum" ]
|
||||
},
|
||||
"default": {}
|
||||
}
|
||||
150
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-06/schema
generated
vendored
Normal file
150
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-06/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-06/schema#",
|
||||
"$id": "http://json-schema.org/draft-06/schema#",
|
||||
"title": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"allOf": [
|
||||
{ "$ref": "#/definitions/nonNegativeInteger" },
|
||||
{ "default": 0 }
|
||||
]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
},
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": {},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": { "$ref": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"contains": { "$ref": "#" },
|
||||
"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$ref": "#" },
|
||||
"const": {},
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"format": { "type": "string" },
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" }
|
||||
},
|
||||
"default": {}
|
||||
}
|
||||
172
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-07/schema
generated
vendored
Normal file
172
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-07/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"allOf": [
|
||||
{ "$ref": "#/definitions/nonNegativeInteger" },
|
||||
{ "default": 0 }
|
||||
]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
},
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": { "$ref": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": true
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"contains": { "$ref": "#" },
|
||||
"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$ref": "#" },
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true,
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"format": { "type": "string" },
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentEncoding": { "type": "string" },
|
||||
"if": { "$ref": "#" },
|
||||
"then": { "$ref": "#" },
|
||||
"else": { "$ref": "#" },
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" }
|
||||
},
|
||||
"default": true
|
||||
}
|
||||
55
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/applicator
generated
vendored
Normal file
55
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/applicator
generated
vendored
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/applicator",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/applicator": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"additionalItems": { "$recursiveRef": "#" },
|
||||
"unevaluatedItems": { "$recursiveRef": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$recursiveRef": "#" },
|
||||
{ "$ref": "#/$defs/schemaArray" }
|
||||
]
|
||||
},
|
||||
"contains": { "$recursiveRef": "#" },
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"unevaluatedProperties": { "$recursiveRef": "#" },
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependentSchemas": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$recursiveRef": "#"
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$recursiveRef": "#" },
|
||||
"if": { "$recursiveRef": "#" },
|
||||
"then": { "$recursiveRef": "#" },
|
||||
"else": { "$recursiveRef": "#" },
|
||||
"allOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"anyOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"oneOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"not": { "$recursiveRef": "#" }
|
||||
},
|
||||
"$defs": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$recursiveRef": "#" }
|
||||
}
|
||||
}
|
||||
}
|
||||
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/content
generated
vendored
Normal file
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/content
generated
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/content",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/content": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Content vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentEncoding": { "type": "string" },
|
||||
"contentSchema": { "$recursiveRef": "#" }
|
||||
}
|
||||
}
|
||||
56
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/core
generated
vendored
Normal file
56
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/core
generated
vendored
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/core",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/core": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Core vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference",
|
||||
"$comment": "Non-empty fragments not allowed.",
|
||||
"pattern": "^[^#]*#?$"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$anchor": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$recursiveRef": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$recursiveAnchor": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"$vocabulary": {
|
||||
"type": "object",
|
||||
"propertyNames": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"additionalProperties": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"$defs": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/format
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/format
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/format",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/format": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Format vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/meta-data
generated
vendored
Normal file
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/meta-data
generated
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/meta-data",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/meta-data": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Meta-data vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"deprecated": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
}
|
||||
}
|
||||
}
|
||||
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/validation
generated
vendored
Normal file
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/validation
generated
vendored
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/validation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/validation": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Validation vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minContains": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 1
|
||||
},
|
||||
"maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/$defs/stringArray" },
|
||||
"dependentRequired": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
}
|
||||
},
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/$defs/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 0
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
}
|
||||
}
|
||||
41
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/schema
generated
vendored
Normal file
41
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/core": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/applicator": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/validation": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/meta-data": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/format": false,
|
||||
"https://json-schema.org/draft/2019-09/vocab/content": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Core and Validation specifications meta-schema",
|
||||
"allOf": [
|
||||
{"$ref": "meta/core"},
|
||||
{"$ref": "meta/applicator"},
|
||||
{"$ref": "meta/validation"},
|
||||
{"$ref": "meta/meta-data"},
|
||||
{"$ref": "meta/format"},
|
||||
{"$ref": "meta/content"}
|
||||
],
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"definitions": {
|
||||
"$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.",
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$recursiveRef": "#" },
|
||||
{ "$ref": "meta/validation#/$defs/stringArray" }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
47
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/applicator
generated
vendored
Normal file
47
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/applicator
generated
vendored
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/applicator",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/applicator": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"prefixItems": { "$ref": "#/$defs/schemaArray" },
|
||||
"items": { "$dynamicRef": "#meta" },
|
||||
"contains": { "$dynamicRef": "#meta" },
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependentSchemas": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"default": {}
|
||||
},
|
||||
"propertyNames": { "$dynamicRef": "#meta" },
|
||||
"if": { "$dynamicRef": "#meta" },
|
||||
"then": { "$dynamicRef": "#meta" },
|
||||
"else": { "$dynamicRef": "#meta" },
|
||||
"allOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"anyOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"oneOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"not": { "$dynamicRef": "#meta" }
|
||||
},
|
||||
"$defs": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
}
|
||||
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/content
generated
vendored
Normal file
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/content
generated
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/content",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/content": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Content vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"contentEncoding": { "type": "string" },
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentSchema": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
50
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/core
generated
vendored
Normal file
50
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/core
generated
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/core",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/core": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Core vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"$ref": "#/$defs/uriReferenceString",
|
||||
"$comment": "Non-empty fragments not allowed.",
|
||||
"pattern": "^[^#]*#?$"
|
||||
},
|
||||
"$schema": { "$ref": "#/$defs/uriString" },
|
||||
"$ref": { "$ref": "#/$defs/uriReferenceString" },
|
||||
"$anchor": { "$ref": "#/$defs/anchorString" },
|
||||
"$dynamicRef": { "$ref": "#/$defs/uriReferenceString" },
|
||||
"$dynamicAnchor": { "$ref": "#/$defs/anchorString" },
|
||||
"$vocabulary": {
|
||||
"type": "object",
|
||||
"propertyNames": { "$ref": "#/$defs/uriString" },
|
||||
"additionalProperties": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"$defs": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"anchorString": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"
|
||||
},
|
||||
"uriString": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"uriReferenceString": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
}
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-annotation
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-annotation
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/format-annotation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-annotation": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Format vocabulary meta-schema for annotation results",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-assertion
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-assertion
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/format-assertion",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-assertion": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Format vocabulary meta-schema for assertion results",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/meta-data
generated
vendored
Normal file
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/meta-data
generated
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/meta-data",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/meta-data": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Meta-data vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"deprecated": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
}
|
||||
}
|
||||
}
|
||||
14
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/unevaluated
generated
vendored
Normal file
14
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/unevaluated
generated
vendored
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/unevaluated",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/unevaluated": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Unevaluated applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"unevaluatedItems": { "$dynamicRef": "#meta" },
|
||||
"unevaluatedProperties": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/validation
generated
vendored
Normal file
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/validation
generated
vendored
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/validation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/validation": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Validation vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/$defs/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minContains": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 1
|
||||
},
|
||||
"maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/$defs/stringArray" },
|
||||
"dependentRequired": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
}
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 0
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
}
|
||||
}
|
||||
57
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/schema
generated
vendored
Normal file
57
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/core": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/applicator": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/validation": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/meta-data": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/content": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Core and Validation specifications meta-schema",
|
||||
"allOf": [
|
||||
{"$ref": "meta/core"},
|
||||
{"$ref": "meta/applicator"},
|
||||
{"$ref": "meta/unevaluated"},
|
||||
{"$ref": "meta/validation"},
|
||||
{"$ref": "meta/meta-data"},
|
||||
{"$ref": "meta/format-annotation"},
|
||||
{"$ref": "meta/content"}
|
||||
],
|
||||
"type": ["object", "boolean"],
|
||||
"$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.",
|
||||
"properties": {
|
||||
"definitions": {
|
||||
"$comment": "\"definitions\" has been replaced by \"$defs\".",
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"deprecated": true,
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$dynamicRef": "#meta" },
|
||||
{ "$ref": "meta/validation#/$defs/stringArray" }
|
||||
]
|
||||
},
|
||||
"deprecated": true,
|
||||
"default": {}
|
||||
},
|
||||
"$recursiveAnchor": {
|
||||
"$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".",
|
||||
"$ref": "meta/core#/$defs/anchorString",
|
||||
"deprecated": true
|
||||
},
|
||||
"$recursiveRef": {
|
||||
"$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".",
|
||||
"$ref": "meta/core#/$defs/uriReferenceString",
|
||||
"deprecated": true
|
||||
}
|
||||
}
|
||||
}
|
||||
549
vendor/github.com/santhosh-tekuri/jsonschema/v6/objcompiler.go
generated
vendored
Normal file
549
vendor/github.com/santhosh-tekuri/jsonschema/v6/objcompiler.go
generated
vendored
Normal file
|
|
@ -0,0 +1,549 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// objCompiler compiles a single schema object (a JSON object form of a
// schema, as opposed to a boolean schema) into a *Schema.
type objCompiler struct {
	c   *Compiler      // compiler driving this compilation
	obj map[string]any // raw JSON object of the schema being compiled
	up  urlPtr         // url + json-pointer locating obj within its document
	r   *root          // root document that obj was loaded from — presumably; verify against callers
	res *resource      // resource ($id scope) that obj belongs to; provides the dialect/draft
	q   *queue         // queue of subschemas pending compilation — presumably; verify against enqueue* helpers
}
|
||||
|
||||
// compile populates s from the raw schema object c.obj.
//
// It extracts $id and anchor information, then applies the keyword passes
// cumulatively for every draft up to and including s.DraftVersion, and
// finally runs any registered vocabulary extensions. Returns the first
// error encountered.
func (c *objCompiler) compile(s *Schema) error {
	// id --
	if id := c.res.dialect.draft.getID(c.obj); id != "" {
		s.ID = id
	}

	// anchor --
	if s.DraftVersion < 2019 {
		// anchor is specified in id
		// (pre-2019 drafts have no $anchor keyword; a plain-name fragment
		// on the id keyword serves as the anchor)
		id := c.string(c.res.dialect.draft.id)
		if id != "" {
			_, f := split(id)
			if f != "" {
				var err error
				// fragment is percent-encoded; decode it to get the anchor name
				s.Anchor, err = decode(f)
				if err != nil {
					return &ParseAnchorError{URL: s.Location}
				}
			}
		}
	} else {
		s.Anchor = c.string("$anchor")
	}

	// keyword passes are cumulative: a 2020 schema runs all of the passes
	// below, a draft-4 schema runs only the first.
	if err := c.compileDraft4(s); err != nil {
		return err
	}
	if s.DraftVersion >= 6 {
		if err := c.compileDraft6(s); err != nil {
			return err
		}
	}
	if s.DraftVersion >= 7 {
		if err := c.compileDraft7(s); err != nil {
			return err
		}
	}
	if s.DraftVersion >= 2019 {
		if err := c.compileDraft2019(s); err != nil {
			return err
		}
	}
	if s.DraftVersion >= 2020 {
		if err := c.compileDraft2020(s); err != nil {
			return err
		}
	}

	// vocabularies
	// Run user-registered vocabulary extensions for every vocabulary active
	// in this schema's dialect; each may contribute an extension to s.
	vocabs := c.res.dialect.activeVocabs(c.c.roots.assertVocabs, c.c.roots.vocabularies)
	for _, vocab := range vocabs {
		v := c.c.roots.vocabularies[vocab]
		if v == nil {
			// active but not registered with this compiler; nothing to do
			continue
		}
		ext, err := v.Compile(&CompilerContext{c}, c.obj)
		if err != nil {
			return err
		}
		if ext != nil {
			s.Extensions = append(s.Extensions, ext)
		}
	}

	return nil
}
|
||||
|
||||
// compileDraft4 compiles the keywords present since draft-04: $ref, the
// applicator keywords (allOf/anyOf/oneOf/not/items/properties/...),
// the validation keywords (type/enum/bounds/lengths/...), format, and
// the title/description/default annotations.
func (c *objCompiler) compileDraft4(s *Schema) error {
	var err error

	if c.hasVocab("core") {
		if s.Ref, err = c.enqueueRef("$ref"); err != nil {
			return err
		}
		if s.DraftVersion < 2019 && s.Ref != nil {
			// All other properties in a "$ref" object MUST be ignored
			return nil
		}
	}

	if c.hasVocab("applicator") {
		s.AllOf = c.enqueueArr("allOf")
		s.AnyOf = c.enqueueArr("anyOf")
		s.OneOf = c.enqueueArr("oneOf")
		s.Not = c.enqueueProp("not")

		if s.DraftVersion < 2020 {
			// pre-2020 "items" is overloaded: an array of schemas (tuple
			// form, paired with additionalItems) or a single schema.
			// 2020-12 replaced the tuple form with prefixItems.
			if items, ok := c.obj["items"]; ok {
				if _, ok := items.([]any); ok {
					s.Items = c.enqueueArr("items")
					s.AdditionalItems = c.enqueueAdditional("additionalItems")
				} else {
					s.Items = c.enqueueProp("items")
				}
			}
		}

		s.Properties = c.enqueueMap("properties")
		if m := c.enqueueMap("patternProperties"); m != nil {
			s.PatternProperties = map[Regexp]*Schema{}
			for pname, sch := range m {
				// each property name must compile under the configured regexp engine
				re, err := c.c.roots.regexpEngine(pname)
				if err != nil {
					return &InvalidRegexError{c.up.format("patternProperties"), pname, err}
				}
				s.PatternProperties[re] = sch
			}
		}
		s.AdditionalProperties = c.enqueueAdditional("additionalProperties")

		if m := c.objVal("dependencies"); m != nil {
			// "dependencies" values are either a required-property list
			// (property dependency) or a subschema (schema dependency)
			s.Dependencies = map[string]any{}
			for pname, pvalue := range m {
				if arr, ok := pvalue.([]any); ok {
					s.Dependencies[pname] = toStrings(arr)
				} else {
					ptr := c.up.ptr.append2("dependencies", pname)
					s.Dependencies[pname] = c.enqueuePtr(ptr)
				}
			}
		}
	}

	if c.hasVocab("validation") {
		if t, ok := c.obj["type"]; ok {
			s.Types = newTypes(t)
		}
		if arr := c.arrVal("enum"); arr != nil {
			s.Enum = newEnum(arr)
		}
		s.MultipleOf = c.numVal("multipleOf")
		s.Maximum = c.numVal("maximum")
		// draft-4 form: exclusiveMaximum is a boolean modifier of maximum;
		// later drafts make it an independent number (the else branch)
		if c.boolean("exclusiveMaximum") {
			s.ExclusiveMaximum = s.Maximum
			s.Maximum = nil
		} else {
			s.ExclusiveMaximum = c.numVal("exclusiveMaximum")
		}
		s.Minimum = c.numVal("minimum")
		// same boolean-vs-number duality for the lower bound
		if c.boolean("exclusiveMinimum") {
			s.ExclusiveMinimum = s.Minimum
			s.Minimum = nil
		} else {
			s.ExclusiveMinimum = c.numVal("exclusiveMinimum")
		}

		s.MinLength = c.intVal("minLength")
		s.MaxLength = c.intVal("maxLength")
		if pat := c.strVal("pattern"); pat != nil {
			s.Pattern, err = c.c.roots.regexpEngine(*pat)
			if err != nil {
				return &InvalidRegexError{c.up.format("pattern"), *pat, err}
			}
		}

		s.MinItems = c.intVal("minItems")
		s.MaxItems = c.intVal("maxItems")
		s.UniqueItems = c.boolean("uniqueItems")

		s.MaxProperties = c.intVal("maxProperties")
		s.MinProperties = c.intVal("minProperties")
		if arr := c.arrVal("required"); arr != nil {
			s.Required = toStrings(arr)
		}
	}

	// format --
	if c.assertFormat(s.DraftVersion) {
		if f := c.strVal("format"); f != nil {
			if *f == "regex" {
				// "regex" is validated by the configured engine, not a
				// built-in format, so it is wired up specially
				s.Format = &Format{
					Name:     "regex",
					Validate: c.c.roots.regexpEngine.validate,
				}
			} else {
				// user-registered formats take precedence over built-ins
				s.Format = c.c.formats[*f]
				if s.Format == nil {
					s.Format = formats[*f]
				}
			}
		}
	}

	// annotations --
	s.Title = c.string("title")
	s.Description = c.string("description")
	if v, ok := c.obj["default"]; ok {
		s.Default = &v
	}

	return nil
}
|
||||
|
||||
func (c *objCompiler) compileDraft6(s *Schema) error {
|
||||
if c.hasVocab("applicator") {
|
||||
s.Contains = c.enqueueProp("contains")
|
||||
s.PropertyNames = c.enqueueProp("propertyNames")
|
||||
}
|
||||
if c.hasVocab("validation") {
|
||||
if v, ok := c.obj["const"]; ok {
|
||||
s.Const = &v
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft7(s *Schema) error {
|
||||
if c.hasVocab("applicator") {
|
||||
s.If = c.enqueueProp("if")
|
||||
if s.If != nil {
|
||||
b := c.boolVal("if")
|
||||
if b == nil || *b {
|
||||
s.Then = c.enqueueProp("then")
|
||||
}
|
||||
if b == nil || !*b {
|
||||
s.Else = c.enqueueProp("else")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.c.assertContent {
|
||||
if ce := c.strVal("contentEncoding"); ce != nil {
|
||||
s.ContentEncoding = c.c.decoders[*ce]
|
||||
if s.ContentEncoding == nil {
|
||||
s.ContentEncoding = decoders[*ce]
|
||||
}
|
||||
}
|
||||
if cm := c.strVal("contentMediaType"); cm != nil {
|
||||
s.ContentMediaType = c.c.mediaTypes[*cm]
|
||||
if s.ContentMediaType == nil {
|
||||
s.ContentMediaType = mediaTypes[*cm]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// annotations --
|
||||
s.Comment = c.string("$comment")
|
||||
s.ReadOnly = c.boolean("readOnly")
|
||||
s.WriteOnly = c.boolean("writeOnly")
|
||||
if arr, ok := c.obj["examples"].([]any); ok {
|
||||
s.Examples = arr
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// compileDraft2019 compiles the keywords introduced in draft 2019-09:
// $recursiveRef/$recursiveAnchor, min/maxContains, dependent* keywords,
// unevaluated* keywords, contentSchema and the deprecated annotation.
func (c *objCompiler) compileDraft2019(s *Schema) error {
	var err error

	if c.hasVocab("core") {
		if s.RecursiveRef, err = c.enqueueRef("$recursiveRef"); err != nil {
			return err
		}
		s.RecursiveAnchor = c.boolean("$recursiveAnchor")
	}

	if c.hasVocab("validation") {
		// min/maxContains only have meaning alongside "contains".
		if s.Contains != nil {
			s.MinContains = c.intVal("minContains")
			s.MaxContains = c.intVal("maxContains")
		}
		if m := c.objVal("dependentRequired"); m != nil {
			s.DependentRequired = map[string][]string{}
			for pname, pvalue := range m {
				// non-array values are silently ignored; the meta-schema
				// rejects them before compilation in the normal flow.
				if arr, ok := pvalue.([]any); ok {
					s.DependentRequired[pname] = toStrings(arr)
				}
			}
		}
	}

	if c.hasVocab("applicator") {
		s.DependentSchemas = c.enqueueMap("dependentSchemas")
	}

	// In 2019-09 the unevaluated* keywords live in the applicator
	// vocabulary; 2020-12 moved them into their own "unevaluated" vocabulary.
	var unevaluated bool
	if s.DraftVersion == 2019 {
		unevaluated = c.hasVocab("applicator")
	} else {
		unevaluated = c.hasVocab("unevaluated")
	}
	if unevaluated {
		s.UnevaluatedItems = c.enqueueProp("unevaluatedItems")
		s.UnevaluatedProperties = c.enqueueProp("unevaluatedProperties")
	}

	if c.c.assertContent {
		// contentSchema applies only after the media type has been decoded
		// into a json value, so it needs an UnmarshalJSON capable media type.
		if s.ContentMediaType != nil && s.ContentMediaType.UnmarshalJSON != nil {
			s.ContentSchema = c.enqueueProp("contentSchema")
		}
	}

	// annotations --
	s.Deprecated = c.boolean("deprecated")

	return nil
}
|
||||
|
||||
// compileDraft2020 compiles the keywords introduced in draft 2020-12:
// $dynamicRef/$dynamicAnchor and prefixItems/items.
func (c *objCompiler) compileDraft2020(s *Schema) error {
	if c.hasVocab("core") {
		sch, err := c.enqueueRef("$dynamicRef")
		if err != nil {
			return err
		}
		if sch != nil {
			// sch != nil implies the "$dynamicRef" keyword exists and is a
			// string, so dereferencing dref below is safe.
			dref := c.strVal("$dynamicRef")
			_, frag, err := splitFragment(*dref)
			if err != nil {
				return err
			}
			// Remember the anchor (if any) so it can be re-resolved
			// dynamically against the evaluation path at validation time.
			var anch string
			if anchor, ok := frag.convert().(anchor); ok {
				anch = string(anchor)
			}
			s.DynamicRef = &DynamicRef{sch, anch}
		}
		s.DynamicAnchor = c.string("$dynamicAnchor")
	}

	if c.hasVocab("applicator") {
		// 2020-12 split draft-4's array-form "items" into prefixItems
		// plus a single-schema "items".
		s.PrefixItems = c.enqueueArr("prefixItems")
		s.Items2020 = c.enqueueProp("items")
	}

	return nil
}
|
||||
|
||||
// enqueue helpers --
|
||||
|
||||
func (c *objCompiler) enqueuePtr(ptr jsonPointer) *Schema {
|
||||
up := urlPtr{c.up.url, ptr}
|
||||
return c.c.enqueue(c.q, up)
|
||||
}
|
||||
|
||||
// enqueueRef resolves the reference keyword pname ($ref, $recursiveRef,
// $dynamicRef) against the enclosing resource's base url and schedules the
// target for compilation. It returns (nil, nil) when the keyword is absent.
func (c *objCompiler) enqueueRef(pname string) (*Schema, error) {
	ref := c.strVal(pname)
	if ref == nil {
		return nil, nil
	}
	baseURL := c.res.id
	// baseURL := c.r.baseURL(c.up.ptr)
	uf, err := baseURL.join(*ref)
	if err != nil {
		return nil, err
	}

	// First try to resolve within the current root document.
	up, err := c.r.resolve(*uf)
	if err != nil {
		return nil, err
	}
	if up != nil {
		// local ref
		return c.enqueuePtr(up.ptr), nil
	}

	// remote ref: resolve through the shared roots cache, which may load
	// the target document.
	up_, err := c.c.roots.resolveFragment(*uf)
	if err != nil {
		return nil, err
	}
	return c.c.enqueue(c.q, up_), nil
}
|
||||
|
||||
func (c *objCompiler) enqueueProp(pname string) *Schema {
|
||||
if _, ok := c.obj[pname]; !ok {
|
||||
return nil
|
||||
}
|
||||
ptr := c.up.ptr.append(pname)
|
||||
return c.enqueuePtr(ptr)
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueArr(pname string) []*Schema {
|
||||
arr := c.arrVal(pname)
|
||||
if arr == nil {
|
||||
return nil
|
||||
}
|
||||
sch := make([]*Schema, len(arr))
|
||||
for i := range arr {
|
||||
ptr := c.up.ptr.append2(pname, strconv.Itoa(i))
|
||||
sch[i] = c.enqueuePtr(ptr)
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueMap(pname string) map[string]*Schema {
|
||||
obj := c.objVal(pname)
|
||||
if obj == nil {
|
||||
return nil
|
||||
}
|
||||
sch := make(map[string]*Schema)
|
||||
for k := range obj {
|
||||
ptr := c.up.ptr.append2(pname, k)
|
||||
sch[k] = c.enqueuePtr(ptr)
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueAdditional(pname string) any {
|
||||
if b := c.boolVal(pname); b != nil {
|
||||
return *b
|
||||
}
|
||||
if sch := c.enqueueProp(pname); sch != nil {
|
||||
return sch
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// hasVocab reports whether the enclosing resource's dialect enables the
// named vocabulary.
func (c *objCompiler) hasVocab(name string) bool {
	return c.res.dialect.hasVocab(name)
}
|
||||
|
||||
func (c *objCompiler) assertFormat(draftVersion int) bool {
|
||||
if c.c.assertFormat || draftVersion < 2019 {
|
||||
return true
|
||||
}
|
||||
if draftVersion == 2019 {
|
||||
return c.hasVocab("format")
|
||||
} else {
|
||||
return c.hasVocab("format-assertion")
|
||||
}
|
||||
}
|
||||
|
||||
// value helpers --
|
||||
|
||||
func (c *objCompiler) boolVal(pname string) *bool {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
b, ok := v.(bool)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return &b
|
||||
}
|
||||
|
||||
func (c *objCompiler) boolean(pname string) bool {
|
||||
b := c.boolVal(pname)
|
||||
return b != nil && *b
|
||||
}
|
||||
|
||||
func (c *objCompiler) strVal(pname string) *string {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return &s
|
||||
}
|
||||
|
||||
func (c *objCompiler) string(pname string) string {
|
||||
if s := c.strVal(pname); s != nil {
|
||||
return *s
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (c *objCompiler) numVal(pname string) *big.Rat {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
switch v.(type) {
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
if n, ok := new(big.Rat).SetString(fmt.Sprint(v)); ok {
|
||||
return n
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) intVal(pname string) *int {
|
||||
if n := c.numVal(pname); n != nil && n.IsInt() {
|
||||
n := int(n.Num().Int64())
|
||||
return &n
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) objVal(pname string) map[string]any {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return obj
|
||||
}
|
||||
|
||||
func (c *objCompiler) arrVal(pname string) []any {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// InvalidRegexError is returned when a schema contains a regular
// expression that the configured regexp engine cannot compile.
type InvalidRegexError struct {
	URL   string // location of the schema containing the regex
	Regex string // the offending regular expression
	Err   error  // underlying compilation error
}

func (e *InvalidRegexError) Error() string {
	return fmt.Sprintf("invalid regex %q at %q: %v", e.Regex, e.URL, e.Err)
}
|
||||
|
||||
// --
|
||||
|
||||
// toStrings extracts the string elements of arr, silently skipping any
// non-string values. It returns nil (not an empty slice) when no strings
// are found, preserving the zero value for callers.
//
// The local was renamed from "strings", which shadowed the standard
// library package of the same name.
func toStrings(arr []any) []string {
	var out []string
	for _, item := range arr {
		if s, ok := item.(string); ok {
			out = append(out, s)
		}
	}
	return out
}
|
||||
212
vendor/github.com/santhosh-tekuri/jsonschema/v6/output.go
generated
vendored
Normal file
212
vendor/github.com/santhosh-tekuri/jsonschema/v6/output.go
generated
vendored
Normal file
|
|
@ -0,0 +1,212 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/language"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
var defaultPrinter = message.NewPrinter(language.English)
|
||||
|
||||
// format ---
|
||||
|
||||
func (e *ValidationError) schemaURL() string {
|
||||
if ref, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
return ref.URL
|
||||
} else {
|
||||
return e.SchemaURL
|
||||
}
|
||||
}
|
||||
|
||||
func (e *ValidationError) absoluteKeywordLocation() string {
|
||||
var schemaURL string
|
||||
var keywordPath []string
|
||||
if ref, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
schemaURL = ref.URL
|
||||
keywordPath = nil
|
||||
} else {
|
||||
schemaURL = e.SchemaURL
|
||||
keywordPath = e.ErrorKind.KeywordPath()
|
||||
}
|
||||
return fmt.Sprintf("%s%s", schemaURL, encode(jsonPtr(keywordPath)))
|
||||
}
|
||||
|
||||
func (e *ValidationError) skip() bool {
|
||||
if len(e.Causes) == 1 {
|
||||
_, ok := e.ErrorKind.(*kind.Reference)
|
||||
return ok
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// display renders e and its causes into sb as an indented bullet list.
// verbose additionally prints each error's schema location; absKwLoc is
// the parent's absolute keyword location, used to abbreviate repeated
// schema urls to "S#<fragment>".
func (e *ValidationError) display(sb *strings.Builder, verbose bool, indent int, absKwLoc string, p *message.Printer) {
	if !e.skip() {
		if indent > 0 {
			sb.WriteByte('\n')
			for i := 0; i < indent-1; i++ {
				sb.WriteString(" ")
			}
			sb.WriteString("- ")
		}
		indent = indent + 1

		prevAbsKwLoc := absKwLoc
		absKwLoc = e.absoluteKeywordLocation()

		if _, ok := e.ErrorKind.(*kind.Schema); ok {
			// root "schema failed" node: message only, no instance location.
			sb.WriteString(e.ErrorKind.LocalizedString(p))
		} else {
			sb.WriteString(p.Sprintf("at %s", quote(jsonPtr(e.InstanceLocation))))
			if verbose {
				schLoc := absKwLoc
				if prevAbsKwLoc != "" {
					// abbreviate when parent and child share the same schema url.
					pu, _ := split(prevAbsKwLoc)
					u, f := split(absKwLoc)
					if u == pu {
						schLoc = fmt.Sprintf("S#%s", f)
					}
				}
				fmt.Fprintf(sb, " [%s]", schLoc)
			}
			fmt.Fprintf(sb, ": %s", e.ErrorKind.LocalizedString(p))
		}
	}
	// Causes are printed one level deeper; a skipped node keeps its
	// children at the current level.
	for _, cause := range e.Causes {
		cause.display(sb, verbose, indent, absKwLoc, p)
	}
}
|
||||
|
||||
// Error implements the error interface using the default English printer.
func (e *ValidationError) Error() string {
	return e.LocalizedError(defaultPrinter)
}
|
||||
|
||||
// LocalizedError returns a terse, single-level rendering of the error
// tree, localized via p.
func (e *ValidationError) LocalizedError(p *message.Printer) string {
	var sb strings.Builder
	e.display(&sb, false, 0, "", p)
	return sb.String()
}
|
||||
|
||||
// GoString returns a verbose rendering (with schema locations) using the
// default English printer.
func (e *ValidationError) GoString() string {
	return e.LocalizedGoString(defaultPrinter)
}
|
||||
|
||||
// LocalizedGoString returns a verbose rendering of the error tree,
// including schema locations, localized via p.
func (e *ValidationError) LocalizedGoString(p *message.Printer) string {
	var sb strings.Builder
	e.display(&sb, true, 0, "", p)
	return sb.String()
}
|
||||
|
||||
func jsonPtr(tokens []string) string {
|
||||
var sb strings.Builder
|
||||
for _, tok := range tokens {
|
||||
sb.WriteByte('/')
|
||||
sb.WriteString(escape(tok))
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// FlagOutput is the `flag` output format defined by the JSON Schema
// specification: a single boolean property valid.
type FlagOutput struct {
	Valid bool `json:"valid"`
}
|
||||
|
||||
// FlagOutput returns the `flag` output format, merely the boolean result.
func (e *ValidationError) FlagOutput() *FlagOutput {
	// the receiver is a validation error, so the result is always invalid.
	return &FlagOutput{Valid: false}
}
|
||||
|
||||
// --
|
||||
|
||||
type OutputUnit struct {
|
||||
Valid bool `json:"valid"`
|
||||
KeywordLocation string `json:"keywordLocation"`
|
||||
AbsoluteKeywordLocation string `json:"AbsoluteKeywordLocation,omitempty"`
|
||||
InstanceLocation string `json:"instanceLocation"`
|
||||
Error *OutputError `json:"error,omitempty"`
|
||||
Errors []OutputUnit `json:"errors,omitempty"`
|
||||
}
|
||||
|
||||
// OutputError is the "error" property of an OutputUnit: an ErrorKind
// paired with the printer used to localize it at marshal time.
type OutputError struct {
	Kind ErrorKind
	p    *message.Printer
}
|
||||
|
||||
// MarshalJSON encodes the error as its localized message string.
func (k OutputError) MarshalJSON() ([]byte, error) {
	return json.Marshal(k.Kind.LocalizedString(k.p))
}
|
||||
|
||||
// BasicOutput returns the `basic` output format: a flat list of output
// units, using the default English printer.
func (e *ValidationError) BasicOutput() *OutputUnit {
	return e.LocalizedBasicOutput(defaultPrinter)
}
|
||||
|
||||
// LocalizedBasicOutput is BasicOutput with messages localized via p.
func (e *ValidationError) LocalizedBasicOutput(p *message.Printer) *OutputUnit {
	out := e.output(true, false, "", "", p)
	return &out
}
|
||||
|
||||
// DetailedOutput returns the `detailed` output format: a hierarchy that
// follows the schema structure, using the default English printer.
func (e *ValidationError) DetailedOutput() *OutputUnit {
	return e.LocalizedDetailedOutput(defaultPrinter)
}
|
||||
|
||||
// LocalizedDetailedOutput is DetailedOutput with messages localized via p.
func (e *ValidationError) LocalizedDetailedOutput(p *message.Printer) *OutputUnit {
	out := e.output(false, false, "", "", p)
	return &out
}
|
||||
|
||||
// output converts the error tree into OutputUnits. flatten selects the
// `basic` (flat) versus `detailed` (nested) shape; inRef records that an
// ancestor was a $ref, which makes absoluteKeywordLocation mandatory;
// schemaURL/kwLoc carry the parent's schema url and keyword location so
// the child's relative location can be derived.
func (e *ValidationError) output(flatten, inRef bool, schemaURL, kwLoc string, p *message.Printer) OutputUnit {
	if !inRef {
		if _, ok := e.ErrorKind.(*kind.Reference); ok {
			inRef = true
		}
	}
	if schemaURL != "" {
		// extend the keyword location by whatever this error's schema url
		// adds beyond the parent's.
		kwLoc += e.SchemaURL[len(schemaURL):]
		if ref, ok := e.ErrorKind.(*kind.Reference); ok {
			kwLoc += jsonPtr(ref.KeywordPath())
		}
	}
	schemaURL = e.schemaURL()

	keywordLocation := kwLoc
	if _, ok := e.ErrorKind.(*kind.Reference); !ok {
		keywordLocation += jsonPtr(e.ErrorKind.KeywordPath())
	}

	out := OutputUnit{
		Valid:            false,
		InstanceLocation: jsonPtr(e.InstanceLocation),
		KeywordLocation:  keywordLocation,
	}
	if inRef {
		out.AbsoluteKeywordLocation = e.absoluteKeywordLocation()
	}
	for _, cause := range e.Causes {
		causeOut := cause.output(flatten, inRef, schemaURL, kwLoc, p)
		if cause.skip() {
			// collapse single-cause reference nodes, like display does.
			causeOut = causeOut.Errors[0]
		}
		if flatten {
			// basic format: hoist grandchildren up into this unit's list.
			errors := causeOut.Errors
			causeOut.Errors = nil
			causeOut.Error = &OutputError{cause.ErrorKind, p}
			out.Errors = append(out.Errors, causeOut)
			if len(errors) > 0 {
				out.Errors = append(out.Errors, errors...)
			}
		} else {
			out.Errors = append(out.Errors, causeOut)
		}
	}
	if len(out.Errors) == 0 {
		// leaf node: report its own error message.
		out.Error = &OutputError{e.ErrorKind, p}
	}
	return out
}
|
||||
142
vendor/github.com/santhosh-tekuri/jsonschema/v6/position.go
generated
vendored
Normal file
142
vendor/github.com/santhosh-tekuri/jsonschema/v6/position.go
generated
vendored
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Position tells possible tokens in json.
type Position interface {
	// collect returns the subvalues of v selected by this position,
	// keyed by their json-pointers (ptr extended by the matched token).
	collect(v any, ptr jsonPointer) map[jsonPointer]any
}
|
||||
|
||||
// --
|
||||
|
||||
type AllProp struct{}
|
||||
|
||||
func (AllProp) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
m := map[jsonPointer]any{}
|
||||
for pname, pvalue := range obj {
|
||||
m[ptr.append(pname)] = pvalue
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AllItem struct{}
|
||||
|
||||
func (AllItem) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
m := map[jsonPointer]any{}
|
||||
for i, item := range arr {
|
||||
m[ptr.append(strconv.Itoa(i))] = item
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Prop string
|
||||
|
||||
func (p Prop) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
pvalue, ok := obj[string(p)]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return map[jsonPointer]any{
|
||||
ptr.append(string(p)): pvalue,
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Item int
|
||||
|
||||
func (i Item) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if i < 0 || int(i) >= len(arr) {
|
||||
return nil
|
||||
}
|
||||
return map[jsonPointer]any{
|
||||
ptr.append(strconv.Itoa(int(i))): arr[int(i)],
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemaPath tells where to look for subschema inside keyword.
|
||||
type SchemaPath []Position
|
||||
|
||||
func schemaPath(path string) SchemaPath {
|
||||
var sp SchemaPath
|
||||
for _, tok := range strings.Split(path, "/") {
|
||||
var pos Position
|
||||
switch tok {
|
||||
case "*":
|
||||
pos = AllProp{}
|
||||
case "[]":
|
||||
pos = AllItem{}
|
||||
default:
|
||||
if i, err := strconv.Atoi(tok); err == nil {
|
||||
pos = Item(i)
|
||||
} else {
|
||||
pos = Prop(tok)
|
||||
}
|
||||
}
|
||||
sp = append(sp, pos)
|
||||
}
|
||||
return sp
|
||||
}
|
||||
|
||||
func (sp SchemaPath) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
if len(sp) == 0 {
|
||||
return map[jsonPointer]any{
|
||||
ptr: v,
|
||||
}
|
||||
}
|
||||
p, sp := sp[0], sp[1:]
|
||||
m := p.collect(v, ptr)
|
||||
mm := map[jsonPointer]any{}
|
||||
for ptr, v := range m {
|
||||
m = sp.collect(v, ptr)
|
||||
for k, v := range m {
|
||||
mm[k] = v
|
||||
}
|
||||
}
|
||||
return mm
|
||||
}
|
||||
|
||||
func (sp SchemaPath) String() string {
|
||||
var sb strings.Builder
|
||||
for _, pos := range sp {
|
||||
if sb.Len() != 0 {
|
||||
sb.WriteByte('/')
|
||||
}
|
||||
switch pos := pos.(type) {
|
||||
case AllProp:
|
||||
sb.WriteString("*")
|
||||
case AllItem:
|
||||
sb.WriteString("[]")
|
||||
case Prop:
|
||||
sb.WriteString(string(pos))
|
||||
case Item:
|
||||
sb.WriteString(strconv.Itoa(int(pos)))
|
||||
}
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
202
vendor/github.com/santhosh-tekuri/jsonschema/v6/root.go
generated
vendored
Normal file
202
vendor/github.com/santhosh-tekuri/jsonschema/v6/root.go
generated
vendored
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// root holds a loaded schema document together with the resources and
// anchors discovered inside it.
type root struct {
	url                 url
	doc                 any
	resources           map[jsonPointer]*resource // embedded resources, keyed by location in doc
	subschemasProcessed map[jsonPointer]struct{}  // subschemas already scanned for resources/anchors
}
|
||||
|
||||
// rootResource returns the resource at the document root (empty pointer).
func (r *root) rootResource() *resource {
	return r.resources[""]
}
|
||||
|
||||
func (r *root) resource(ptr jsonPointer) *resource {
|
||||
for {
|
||||
if res, ok := r.resources[ptr]; ok {
|
||||
return res
|
||||
}
|
||||
slash := strings.LastIndexByte(string(ptr), '/')
|
||||
if slash == -1 {
|
||||
break
|
||||
}
|
||||
ptr = ptr[:slash]
|
||||
}
|
||||
return r.rootResource()
|
||||
}
|
||||
|
||||
// resolveFragmentIn resolves frag (a json-pointer or a plain-name anchor)
// within res, returning the absolute location inside this root.
func (r *root) resolveFragmentIn(frag fragment, res *resource) (urlPtr, error) {
	var ptr jsonPointer
	switch f := frag.convert().(type) {
	case jsonPointer:
		// pointer fragments are relative to the resource's own location.
		ptr = res.ptr.concat(f)
	case anchor:
		aptr, ok := res.anchors[f]
		if !ok {
			return urlPtr{}, &AnchorNotFoundError{
				URL:       r.url.String(),
				Reference: (&urlFrag{res.id, frag}).String(),
			}
		}
		ptr = aptr
	}
	return urlPtr{r.url, ptr}, nil
}
|
||||
|
||||
// resolveFragment resolves frag against the root resource of this document.
func (r *root) resolveFragment(frag fragment) (urlPtr, error) {
	return r.resolveFragmentIn(frag, r.rootResource())
}
|
||||
|
||||
// resolve resolves urlFrag to urlPtr from root.
// returns nil if it is external.
func (r *root) resolve(uf urlFrag) (*urlPtr, error) {
	var res *resource
	if uf.url == r.url {
		res = r.rootResource()
	} else {
		// look for resource with id==uf.url
		for _, v := range r.resources {
			if v.id == uf.url {
				res = v
				break
			}
		}
		if res == nil {
			return nil, nil // external url
		}
	}
	up, err := r.resolveFragmentIn(uf.frag, res)
	return &up, err
}
|
||||
|
||||
// collectAnchors registers the anchors declared by sch (at schPtr) into
// res, handling the draft-specific spellings: pre-2019 anchors embedded in
// the id fragment, $anchor from 2019-09, and $dynamicAnchor from 2020-12.
// Duplicate anchors within a resource are an error.
func (r *root) collectAnchors(sch any, schPtr jsonPointer, res *resource) error {
	obj, ok := sch.(map[string]any)
	if !ok {
		return nil
	}

	addAnchor := func(anchor anchor) error {
		ptr1, ok := res.anchors[anchor]
		if ok {
			if ptr1 == schPtr {
				// anchor with same root_ptr already exists
				return nil
			}
			return &DuplicateAnchorError{
				string(anchor), r.url.String(), string(ptr1), string(schPtr),
			}
		}
		res.anchors[anchor] = schPtr
		return nil
	}

	if res.dialect.draft.version < 2019 {
		if _, ok := obj["$ref"]; ok {
			// All other properties in a "$ref" object MUST be ignored
			return nil
		}
		// anchor is specified in id
		if id, ok := strVal(obj, res.dialect.draft.id); ok {
			_, frag, err := splitFragment(id)
			if err != nil {
				loc := urlPtr{r.url, schPtr}
				return &ParseAnchorError{loc.String()}
			}
			if anchor, ok := frag.convert().(anchor); ok {
				if err := addAnchor(anchor); err != nil {
					return err
				}
			}
		}
	}
	if res.dialect.draft.version >= 2019 {
		if s, ok := strVal(obj, "$anchor"); ok {
			if err := addAnchor(anchor(s)); err != nil {
				return err
			}
		}
	}
	if res.dialect.draft.version >= 2020 {
		// $dynamicAnchor behaves as a normal anchor too, but is also
		// tracked separately for $dynamicRef resolution.
		if s, ok := strVal(obj, "$dynamicAnchor"); ok {
			if err := addAnchor(anchor(s)); err != nil {
				return err
			}
			res.dynamicAnchors = append(res.dynamicAnchors, anchor(s))
		}
	}

	return nil
}
|
||||
|
||||
func (r *root) clone() *root {
|
||||
processed := map[jsonPointer]struct{}{}
|
||||
for k := range r.subschemasProcessed {
|
||||
processed[k] = struct{}{}
|
||||
}
|
||||
resources := map[jsonPointer]*resource{}
|
||||
for k, v := range r.resources {
|
||||
resources[k] = v.clone()
|
||||
}
|
||||
return &root{
|
||||
url: r.url,
|
||||
doc: r.doc,
|
||||
resources: resources,
|
||||
subschemasProcessed: processed,
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// resource is a schema resource embedded in a root document: a location
// with its own base uri ("$id"), dialect, and declared anchors.
type resource struct {
	ptr            jsonPointer            // location within the root document
	id             url                    // canonical uri of this resource
	dialect        dialect                // draft + active vocabularies
	anchors        map[anchor]jsonPointer // anchor name -> location
	dynamicAnchors []anchor               // anchors declared via $dynamicAnchor
}
|
||||
|
||||
// newResource returns a resource at ptr with canonical uri id and an
// empty anchor table; the dialect is filled in by the caller.
func newResource(ptr jsonPointer, id url) *resource {
	return &resource{ptr: ptr, id: id, anchors: make(map[anchor]jsonPointer)}
}
|
||||
|
||||
func (res *resource) clone() *resource {
|
||||
anchors := map[anchor]jsonPointer{}
|
||||
for k, v := range res.anchors {
|
||||
anchors[k] = v
|
||||
}
|
||||
return &resource{
|
||||
ptr: res.ptr,
|
||||
id: res.id,
|
||||
dialect: res.dialect,
|
||||
anchors: anchors,
|
||||
dynamicAnchors: slices.Clone(res.dynamicAnchors),
|
||||
}
|
||||
}
|
||||
|
||||
//--
|
||||
|
||||
// UnsupportedVocabularyError is returned when a meta-schema requires a
// vocabulary that is neither built in nor registered by the user.
type UnsupportedVocabularyError struct {
	URL        string // schema in which the vocabulary was required
	Vocabulary string // uri of the unsupported vocabulary
}

func (e *UnsupportedVocabularyError) Error() string {
	return fmt.Sprintf("unsupported vocabulary %q in %q", e.Vocabulary, e.URL)
}
|
||||
|
||||
// --
|
||||
|
||||
// AnchorNotFoundError is returned when a reference names an anchor that
// does not exist in the target schema.
type AnchorNotFoundError struct {
	URL       string // schema in which the anchor was searched
	Reference string // the reference containing the missing anchor
}

func (e *AnchorNotFoundError) Error() string {
	return fmt.Sprintf("anchor in %q not found in schema %q", e.Reference, e.URL)
}
|
||||
289
vendor/github.com/santhosh-tekuri/jsonschema/v6/roots.go
generated
vendored
Normal file
289
vendor/github.com/santhosh-tekuri/jsonschema/v6/roots.go
generated
vendored
Normal file
|
|
@ -0,0 +1,289 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// roots caches loaded schema documents and holds the configuration shared
// by them during compilation.
type roots struct {
	defaultDraft *Draft                 // draft assumed when $schema is absent
	roots        map[url]*root          // loaded documents, keyed by url
	loader       defaultLoader          // resolves urls to documents, with caching
	regexpEngine RegexpEngine           // engine used to compile schema regexes
	vocabularies map[string]*Vocabulary // user-registered vocabularies
	assertVocabs bool
}
|
||||
|
||||
// newRoots returns a roots with the default configuration: the latest
// draft, file-based loading, and Go's regexp engine.
func newRoots() *roots {
	return &roots{
		defaultDraft: draftLatest,
		roots:        map[url]*root{},
		loader: defaultLoader{
			docs:   map[url]any{},
			loader: FileLoader{},
		},
		regexpEngine: goRegexpCompile,
		vocabularies: map[string]*Vocabulary{},
	}
}
|
||||
|
||||
// orLoad returns the cached root for u, loading and registering the
// document on first use.
func (rr *roots) orLoad(u url) (*root, error) {
	if r, ok := rr.roots[u]; ok {
		return r, nil
	}
	doc, err := rr.loader.load(u)
	if err != nil {
		return nil, err
	}
	return rr.addRoot(u, doc)
}
|
||||
|
||||
// addRoot collects the resources embedded in doc, validates the document
// against its meta-schema, then caches and returns the new root. The
// json-schema.org meta-schemas themselves are not validated (doing so
// would recurse into loading their own meta-schemas).
func (rr *roots) addRoot(u url, doc any) (*root, error) {
	r := &root{
		url:                 u,
		doc:                 doc,
		resources:           map[jsonPointer]*resource{},
		subschemasProcessed: map[jsonPointer]struct{}{},
	}
	if err := rr.collectResources(r, doc, u, "", dialect{rr.defaultDraft, nil}); err != nil {
		return nil, err
	}
	if !strings.HasPrefix(u.String(), "http://json-schema.org/") &&
		!strings.HasPrefix(u.String(), "https://json-schema.org/") {
		if err := rr.validate(r, doc, ""); err != nil {
			return nil, err
		}
	}

	rr.roots[u] = r
	return r, nil
}
|
||||
|
||||
// resolveFragment loads uf.url if necessary and resolves uf.frag inside
// the resulting root.
func (rr *roots) resolveFragment(uf urlFrag) (urlPtr, error) {
	r, err := rr.orLoad(uf.url)
	if err != nil {
		return urlPtr{}, err
	}
	return r.resolveFragment(uf.frag)
}
|
||||
|
||||
func (rr *roots) collectResources(r *root, sch any, base url, schPtr jsonPointer, fallback dialect) error {
|
||||
if _, ok := r.subschemasProcessed[schPtr]; ok {
|
||||
return nil
|
||||
}
|
||||
if err := rr._collectResources(r, sch, base, schPtr, fallback); err != nil {
|
||||
return err
|
||||
}
|
||||
r.subschemasProcessed[schPtr] = struct{}{}
|
||||
return nil
|
||||
}
|
||||
|
||||
// _collectResources does the actual resource collection for sch at schPtr:
// it determines the effective draft, registers a new resource when the
// schema declares an id (or is the document root), collects anchors into
// the enclosing base resource, and recurses into subschemas. fallback is
// the dialect inherited from the enclosing resource.
func (rr *roots) _collectResources(r *root, sch any, base url, schPtr jsonPointer, fallback dialect) error {
	if _, ok := sch.(bool); ok {
		if schPtr.isEmpty() {
			// root resource
			res := newResource(schPtr, base)
			res.dialect = fallback
			r.resources[schPtr] = res
		}
		return nil
	}
	obj, ok := sch.(map[string]any)
	if !ok {
		return nil
	}

	hasSchema := false
	if sch, ok := obj["$schema"]; ok {
		if _, ok := sch.(string); ok {
			hasSchema = true
		}
	}

	draft, err := rr.loader.getDraft(urlPtr{r.url, schPtr}, sch, fallback.draft, map[url]struct{}{})
	if err != nil {
		return err
	}
	id := draft.getID(obj)
	if id == "" && !schPtr.isEmpty() {
		// $schema without an id in a subschema does not start a new
		// resource; ignore $schema and retry with the inherited draft.
		draft = fallback.draft
		hasSchema = false
		id = draft.getID(obj)
	}

	var res *resource
	if id != "" {
		uf, err := base.join(id)
		if err != nil {
			loc := urlPtr{r.url, schPtr}
			return &ParseIDError{loc.String()}
		}
		base = uf.url
		res = newResource(schPtr, base)
	} else if schPtr.isEmpty() {
		// root resource
		res = newResource(schPtr, base)
	}

	if res != nil {
		// a second resource with the same id is only legal if it is the
		// very same location (re-scan of an already known resource).
		found := false
		for _, res := range r.resources {
			if res.id == base {
				found = true
				if res.ptr != schPtr {
					return &DuplicateIDError{base.String(), r.url.String(), string(schPtr), string(res.ptr)}
				}
			}
		}
		if !found {
			if hasSchema {
				vocabs, err := rr.loader.getMetaVocabs(sch, draft, rr.vocabularies)
				if err != nil {
					return err
				}
				res.dialect = dialect{draft, vocabs}
			} else {
				res.dialect = fallback
			}
			r.resources[schPtr] = res
		}
	}

	var baseRes *resource
	for _, res := range r.resources {
		if res.id == base {
			baseRes = res
			break
		}
	}
	if baseRes == nil {
		// base was either just registered above or inherited from the
		// caller; its resource must exist.
		panic("baseres is nil")
	}

	// found base resource
	if err := r.collectAnchors(sch, schPtr, baseRes); err != nil {
		return err
	}

	// process subschemas: positions come from the draft plus any
	// user-registered vocabularies active in the base dialect.
	subschemas := map[jsonPointer]any{}
	for _, sp := range draft.subschemas {
		ss := sp.collect(obj, schPtr)
		for k, v := range ss {
			subschemas[k] = v
		}
	}
	for _, vocab := range baseRes.dialect.activeVocabs(true, rr.vocabularies) {
		if v := rr.vocabularies[vocab]; v != nil {
			for _, sp := range v.Subschemas {
				ss := sp.collect(obj, schPtr)
				for k, v := range ss {
					subschemas[k] = v
				}
			}
		}
	}
	for ptr, v := range subschemas {
		if err := rr.collectResources(r, v, base, ptr, baseRes.dialect); err != nil {
			return err
		}
	}

	return nil
}
|
||||
|
||||
// ensureSubschema makes sure the subschema at up has been scanned for
// resources/anchors and validated against its meta-schema. The work is
// done on a clone of the cached root so a failure leaves the cache
// untouched; on success the clone replaces the cached root.
func (rr *roots) ensureSubschema(up urlPtr) error {
	r, err := rr.orLoad(up.url)
	if err != nil {
		return err
	}
	if _, ok := r.subschemasProcessed[up.ptr]; ok {
		return nil
	}
	v, err := up.lookup(r.doc)
	if err != nil {
		return err
	}
	rClone := r.clone()
	if err := rr.addSubschema(rClone, up.ptr); err != nil {
		return err
	}
	if err := rr.validate(rClone, v, up.ptr); err != nil {
		return err
	}
	rr.roots[r.url] = rClone
	return nil
}
|
||||
|
||||
// addSubschema collects resources for the subschema at ptr using its
// enclosing resource's base url and dialect, and collects its anchors
// when the subschema did not itself become a new resource.
func (rr *roots) addSubschema(r *root, ptr jsonPointer) error {
	v, err := (&urlPtr{r.url, ptr}).lookup(r.doc)
	if err != nil {
		return err
	}
	base := r.resource(ptr)
	baseURL := base.id
	if err := rr.collectResources(r, v, baseURL, ptr, base.dialect); err != nil {
		return err
	}

	// collect anchors
	if _, ok := r.resources[ptr]; !ok {
		// not a resource of its own: anchors belong to the enclosing one.
		res := r.resource(ptr)
		if err := r.collectAnchors(v, ptr, res); err != nil {
			return err
		}
	}
	return nil
}
|
||||
|
||||
// validate checks the (sub)schema value v at ptr against the meta-schema
// of its dialect, wrapping any failure in a SchemaValidationError.
func (rr *roots) validate(r *root, v any, ptr jsonPointer) error {
	dialect := r.resource(ptr).dialect
	meta := dialect.getSchema(rr.assertVocabs, rr.vocabularies)
	if err := meta.validate(v, rr.regexpEngine, meta, r.resources, rr.assertVocabs, rr.vocabularies); err != nil {
		up := urlPtr{r.url, ptr}
		return &SchemaValidationError{URL: up.String(), Err: err}
	}
	return nil
}
|
||||
|
||||
// --
|
||||
|
||||
// InvalidMetaSchemaURLError is returned when a "$schema" value cannot be
// parsed as a url.
type InvalidMetaSchemaURLError struct {
	URL string // schema containing the bad $schema
	Err error  // underlying parse error
}

func (e *InvalidMetaSchemaURLError) Error() string {
	return fmt.Sprintf("invalid $schema in %q: %v", e.URL, e.Err)
}
|
||||
|
||||
// --
|
||||
|
||||
// UnsupportedDraftError is returned when "$schema" names a draft this
// library does not implement.
type UnsupportedDraftError struct {
	URL string // the unsupported $schema url
}

func (e *UnsupportedDraftError) Error() string {
	return fmt.Sprintf("draft %q is not supported", e.URL)
}
|
||||
|
||||
// --
|
||||
|
||||
// MetaSchemaCycleError is returned when resolving "$schema" chains leads
// back to a meta-schema already being resolved.
type MetaSchemaCycleError struct {
	URL string // schema at which the cycle was detected
}

func (e *MetaSchemaCycleError) Error() string {
	return fmt.Sprintf("cycle in resolving $schema in %q", e.URL)
}
|
||||
|
||||
// --
|
||||
|
||||
// MetaSchemaMismatchError is returned when a subschema declares a
// "$schema" different from the root's.
type MetaSchemaMismatchError struct {
	URL string // location of the mismatching subschema
}

func (e *MetaSchemaMismatchError) Error() string {
	return fmt.Sprintf("$schema in %q does not match with $schema in root", e.URL)
}
|
||||
248
vendor/github.com/santhosh-tekuri/jsonschema/v6/schema.go
generated
vendored
Normal file
248
vendor/github.com/santhosh-tekuri/jsonschema/v6/schema.go
generated
vendored
Normal file
|
|
@ -0,0 +1,248 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
)
|
||||
|
||||
// Schema is the representation of a compiled jsonschema.
type Schema struct {
	up             urlPtr
	resource       *Schema            // enclosing resource schema
	dynamicAnchors map[string]*Schema // $dynamicAnchor name -> schema, for $dynamicRef

	// evaluation shortcuts: when true/known at compile time, the
	// unevaluated* keywords can skip annotation tracking.
	allPropsEvaluated bool
	allItemsEvaluated bool
	numItemsEvaluated int

	DraftVersion int
	Location     string

	// type agnostic --
	Bool            *bool // boolean schema
	ID              string
	Ref             *Schema
	Anchor          string
	RecursiveRef    *Schema
	RecursiveAnchor bool
	DynamicRef      *DynamicRef
	DynamicAnchor   string // "" if not specified
	Types           *Types
	Enum            *Enum
	Const           *any
	Not             *Schema
	AllOf           []*Schema
	AnyOf           []*Schema
	OneOf           []*Schema
	If              *Schema
	Then            *Schema
	Else            *Schema
	Format          *Format

	// object --
	MaxProperties         *int
	MinProperties         *int
	Required              []string
	PropertyNames         *Schema
	Properties            map[string]*Schema
	PatternProperties     map[Regexp]*Schema
	AdditionalProperties  any            // nil or bool or *Schema
	Dependencies          map[string]any // value is []string or *Schema
	DependentRequired     map[string][]string
	DependentSchemas      map[string]*Schema
	UnevaluatedProperties *Schema

	// array --
	MinItems         *int
	MaxItems         *int
	UniqueItems      bool
	Contains         *Schema
	MinContains      *int
	MaxContains      *int
	Items            any // nil or []*Schema or *Schema
	AdditionalItems  any // nil or bool or *Schema
	PrefixItems      []*Schema
	Items2020        *Schema
	UnevaluatedItems *Schema

	// string --
	MinLength        *int
	MaxLength        *int
	Pattern          Regexp
	ContentEncoding  *Decoder
	ContentMediaType *MediaType
	ContentSchema    *Schema

	// number -- exact rationals, so draft-defined numeric comparisons
	// are free of float rounding.
	Maximum          *big.Rat
	Minimum          *big.Rat
	ExclusiveMaximum *big.Rat
	ExclusiveMinimum *big.Rat
	MultipleOf       *big.Rat

	Extensions []SchemaExt

	// annotations --
	Title       string
	Description string
	Default     *any
	Comment     string
	ReadOnly    bool
	WriteOnly   bool
	Examples    []any
	Deprecated  bool
}
|
||||
|
||||
// --
|
||||
|
||||
type jsonType int
|
||||
|
||||
const (
|
||||
invalidType jsonType = 0
|
||||
nullType jsonType = 1 << iota
|
||||
booleanType
|
||||
numberType
|
||||
integerType
|
||||
stringType
|
||||
arrayType
|
||||
objectType
|
||||
)
|
||||
|
||||
func typeOf(v any) jsonType {
|
||||
switch v.(type) {
|
||||
case nil:
|
||||
return nullType
|
||||
case bool:
|
||||
return booleanType
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
return numberType
|
||||
case string:
|
||||
return stringType
|
||||
case []any:
|
||||
return arrayType
|
||||
case map[string]any:
|
||||
return objectType
|
||||
default:
|
||||
return invalidType
|
||||
}
|
||||
}
|
||||
|
||||
func typeFromString(s string) jsonType {
|
||||
switch s {
|
||||
case "null":
|
||||
return nullType
|
||||
case "boolean":
|
||||
return booleanType
|
||||
case "number":
|
||||
return numberType
|
||||
case "integer":
|
||||
return integerType
|
||||
case "string":
|
||||
return stringType
|
||||
case "array":
|
||||
return arrayType
|
||||
case "object":
|
||||
return objectType
|
||||
}
|
||||
return invalidType
|
||||
}
|
||||
|
||||
func (jt jsonType) String() string {
|
||||
switch jt {
|
||||
case nullType:
|
||||
return "null"
|
||||
case booleanType:
|
||||
return "boolean"
|
||||
case numberType:
|
||||
return "number"
|
||||
case integerType:
|
||||
return "integer"
|
||||
case stringType:
|
||||
return "string"
|
||||
case arrayType:
|
||||
return "array"
|
||||
case objectType:
|
||||
return "object"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// Types encapsulates list of json value types.
|
||||
type Types int
|
||||
|
||||
func newTypes(v any) *Types {
|
||||
var types Types
|
||||
switch v := v.(type) {
|
||||
case string:
|
||||
types.add(typeFromString(v))
|
||||
case []any:
|
||||
for _, item := range v {
|
||||
if s, ok := item.(string); ok {
|
||||
types.add(typeFromString(s))
|
||||
}
|
||||
}
|
||||
}
|
||||
if types.IsEmpty() {
|
||||
return nil
|
||||
}
|
||||
return &types
|
||||
}
|
||||
|
||||
func (tt Types) IsEmpty() bool {
|
||||
return tt == 0
|
||||
}
|
||||
|
||||
func (tt *Types) add(t jsonType) {
|
||||
*tt = Types(int(*tt) | int(t))
|
||||
}
|
||||
|
||||
func (tt Types) contains(t jsonType) bool {
|
||||
return int(tt)&int(t) != 0
|
||||
}
|
||||
|
||||
func (tt Types) ToStrings() []string {
|
||||
types := []jsonType{
|
||||
nullType, booleanType, numberType, integerType,
|
||||
stringType, arrayType, objectType,
|
||||
}
|
||||
var arr []string
|
||||
for _, t := range types {
|
||||
if tt.contains(t) {
|
||||
arr = append(arr, t.String())
|
||||
}
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
func (tt Types) String() string {
|
||||
return fmt.Sprintf("%v", tt.ToStrings())
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Enum struct {
|
||||
Values []any
|
||||
types Types
|
||||
}
|
||||
|
||||
func newEnum(arr []any) *Enum {
|
||||
var types Types
|
||||
for _, item := range arr {
|
||||
types.add(typeOf(item))
|
||||
}
|
||||
return &Enum{arr, types}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// DynamicRef represents a compiled $dynamicRef: the statically
// resolved target plus the anchor used for dynamic resolution.
type DynamicRef struct {
	Ref    *Schema
	Anchor string // "" if not specified
}
|
||||
|
||||
func newSchema(up urlPtr) *Schema {
|
||||
return &Schema{up: up, Location: up.String()}
|
||||
}
|
||||
464
vendor/github.com/santhosh-tekuri/jsonschema/v6/util.go
generated
vendored
Normal file
464
vendor/github.com/santhosh-tekuri/jsonschema/v6/util.go
generated
vendored
Normal file
|
|
@ -0,0 +1,464 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"math/big"
|
||||
gourl "net/url"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
// --
|
||||
|
||||
type url (string)
|
||||
|
||||
func (u url) String() string {
|
||||
return string(u)
|
||||
}
|
||||
|
||||
func (u url) join(ref string) (*urlFrag, error) {
|
||||
base, err := gourl.Parse(string(u))
|
||||
if err != nil {
|
||||
return nil, &ParseURLError{URL: u.String(), Err: err}
|
||||
}
|
||||
|
||||
ref, frag, err := splitFragment(ref)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
refURL, err := gourl.Parse(ref)
|
||||
if err != nil {
|
||||
return nil, &ParseURLError{URL: ref, Err: err}
|
||||
}
|
||||
resolved := base.ResolveReference(refURL)
|
||||
|
||||
// see https://github.com/golang/go/issues/66084 (net/url: ResolveReference ignores Opaque value)
|
||||
if !refURL.IsAbs() && base.Opaque != "" {
|
||||
resolved.Opaque = base.Opaque
|
||||
}
|
||||
|
||||
return &urlFrag{url: url(resolved.String()), frag: frag}, nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// jsonPointer is an RFC 6901 json-pointer: "" or "/tok/tok...".
type jsonPointer string

// escape encodes a single reference token: "~" -> "~0", "/" -> "~1".
// The "~" replacement must run first so "/" escapes are not mangled.
func escape(tok string) string {
	tok = strings.ReplaceAll(tok, "~", "~0")
	return strings.ReplaceAll(tok, "/", "~1")
}

// unescape decodes a single reference token, reversing escape.
// The second result is false when the token contains an invalid
// "~" escape sequence.
func unescape(tok string) (string, bool) {
	i := strings.IndexByte(tok, '~')
	if i == -1 {
		return tok, true
	}
	var sb strings.Builder
	for i != -1 {
		sb.WriteString(tok[:i])
		rest := tok[i+1:]
		if rest == "" {
			return "", false // dangling "~"
		}
		switch rest[0] {
		case '0':
			sb.WriteByte('~')
		case '1':
			sb.WriteByte('/')
		default:
			return "", false // only "~0" and "~1" are valid
		}
		tok = rest[1:]
		i = strings.IndexByte(tok, '~')
	}
	sb.WriteString(tok)
	return sb.String(), true
}

// isEmpty reports whether ptr refers to the document root.
func (ptr jsonPointer) isEmpty() bool {
	return len(ptr) == 0
}

// concat joins two pointers by simple concatenation.
func (ptr jsonPointer) concat(next jsonPointer) jsonPointer {
	return ptr + next
}

// append extends ptr with one (escaped) reference token.
func (ptr jsonPointer) append(tok string) jsonPointer {
	return ptr + jsonPointer("/"+escape(tok))
}

// append2 extends ptr with two (escaped) reference tokens.
func (ptr jsonPointer) append2(tok1, tok2 string) jsonPointer {
	return ptr.append(tok1).append(tok2)
}
|
||||
|
||||
// --
|
||||
|
||||
// anchor is a plain-name fragment (one that does not start with "/").
type anchor string

// --

// fragment is the decoded fragment portion of a url.
type fragment string

// decode percent-decodes a url fragment.
func decode(frag string) (string, error) {
	return gourl.PathUnescape(frag)
}

// encode percent-encodes frag while keeping "/" unescaped,
// so json-pointer fragments stay readable.
func encode(frag string) string {
	toks := strings.Split(frag, "/")
	for i, tok := range toks {
		toks[i] = gourl.PathEscape(tok)
	}
	return strings.Join(toks, "/")
}
|
||||
|
||||
func splitFragment(str string) (string, fragment, error) {
|
||||
u, f := split(str)
|
||||
f, err := decode(f)
|
||||
if err != nil {
|
||||
return "", fragment(""), &ParseURLError{URL: str, Err: err}
|
||||
}
|
||||
return u, fragment(f), nil
|
||||
}
|
||||
|
||||
// split cuts str at the first "#", returning the parts before and
// after it. When there is no "#", the second result is "".
func split(str string) (string, string) {
	if before, after, found := strings.Cut(str, "#"); found {
		return before, after
	}
	return str, ""
}
|
||||
|
||||
func (frag fragment) convert() any {
|
||||
str := string(frag)
|
||||
if str == "" || strings.HasPrefix(str, "/") {
|
||||
return jsonPointer(str)
|
||||
}
|
||||
return anchor(str)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// urlFrag is a url split into its absolute part and its
// (already decoded) fragment.
type urlFrag struct {
	url  url
	frag fragment
}
|
||||
|
||||
// startsWithWindowsDrive reports whether s looks like an absolute
// windows path such as `C:\dir`.
func startsWithWindowsDrive(s string) bool {
	if s == "" || !strings.HasPrefix(s[1:], `:\`) {
		return false
	}
	drive := s[0]
	return (drive >= 'a' && drive <= 'z') || (drive >= 'A' && drive <= 'Z')
}
|
||||
|
||||
// absolute converts input into an absolute url#fragment. Inputs that
// are not already absolute urls are treated as filesystem paths and
// converted into file:// urls.
func absolute(input string) (*urlFrag, error) {
	u, frag, err := splitFragment(input)
	if err != nil {
		return nil, err
	}

	// if windows absolute file path, convert to file url
	// because: net/url parses driver name as scheme
	if runtime.GOOS == "windows" && startsWithWindowsDrive(u) {
		u = "file:///" + filepath.ToSlash(u)
	}

	gourl, err := gourl.Parse(u)
	if err != nil {
		return nil, &ParseURLError{URL: input, Err: err}
	}
	if gourl.IsAbs() {
		return &urlFrag{url(u), frag}, nil
	}

	// not absolute: resolve against the working directory.
	// avoid filesystem api in wasm
	if runtime.GOOS != "js" {
		abs, err := filepath.Abs(u)
		if err != nil {
			return nil, &ParseURLError{URL: input, Err: err}
		}
		u = abs
	}
	if !strings.HasPrefix(u, "/") {
		u = "/" + u
	}
	u = "file://" + filepath.ToSlash(u)

	// NOTE(review): `gourl` here is the local *url.URL declared above
	// (it shadows the package alias), so this is URL.Parse resolving u
	// against that earlier parse; the result is discarded and only the
	// error is used — presumably a sanity check that the constructed
	// file url is well formed. Confirm intent against upstream.
	_, err = gourl.Parse(u)
	if err != nil {
		return nil, &ParseURLError{URL: input, Err: err}
	}
	return &urlFrag{url: url(u), frag: frag}, nil
}
|
||||
|
||||
func (uf *urlFrag) String() string {
|
||||
return fmt.Sprintf("%s#%s", uf.url, encode(string(uf.frag)))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// urlPtr addresses a location inside a document: an absolute url
// plus a json-pointer within that document.
type urlPtr struct {
	url url
	ptr jsonPointer
}
|
||||
|
||||
func (up *urlPtr) lookup(v any) (any, error) {
|
||||
for _, tok := range strings.Split(string(up.ptr), "/")[1:] {
|
||||
tok, ok := unescape(tok)
|
||||
if !ok {
|
||||
return nil, &InvalidJsonPointerError{up.String()}
|
||||
}
|
||||
switch val := v.(type) {
|
||||
case map[string]any:
|
||||
if pvalue, ok := val[tok]; ok {
|
||||
v = pvalue
|
||||
continue
|
||||
}
|
||||
case []any:
|
||||
if index, err := strconv.Atoi(tok); err == nil {
|
||||
if index >= 0 && index < len(val) {
|
||||
v = val[index]
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, &JSONPointerNotFoundError{up.String()}
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
func (up *urlPtr) format(tok string) string {
|
||||
return fmt.Sprintf("%s#%s/%s", up.url, encode(string(up.ptr)), encode(escape(tok)))
|
||||
}
|
||||
|
||||
func (up *urlPtr) String() string {
|
||||
return fmt.Sprintf("%s#%s", up.url, encode(string(up.ptr)))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// minInt returns the smaller of i and j.
//
// Kept as a named helper for existing callers; the body delegates
// to the built-in min (Go 1.21+, which this module already requires
// via its use of the slices package).
func minInt(i, j int) int {
	return min(i, j)
}
|
||||
|
||||
func strVal(obj map[string]any, prop string) (string, bool) {
|
||||
v, ok := obj[prop]
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
s, ok := v.(string)
|
||||
return s, ok
|
||||
}
|
||||
|
||||
// isInteger reports whether num represents a mathematical integer
// (e.g. 2, 2.0, json.Number("2")).
func isInteger(num any) bool {
	rat, ok := new(big.Rat).SetString(fmt.Sprint(num))
	if !ok {
		return false
	}
	return rat.IsInt()
}
|
||||
|
||||
// quote returns single-quoted string.
// used for embedding quoted strings in json.
func quote(s string) string {
	q := fmt.Sprintf("%q", s)
	q = q[1 : len(q)-1] // drop the surrounding double quotes
	q = strings.ReplaceAll(q, `\"`, `"`)
	q = strings.ReplaceAll(q, `'`, `\'`)
	return "'" + q + "'"
}
|
||||
|
||||
// equals reports whether v1 and v2 are equal under json semantics:
// numbers compare by mathematical value (so 1, 1.0 and
// json.Number("1") are equal), objects by key set and values, and
// arrays element-wise. A non-nil ErrorKind is returned when a value
// (possibly nested) is not a valid json value.
func equals(v1, v2 any) (bool, ErrorKind) {
	switch v1 := v1.(type) {
	case map[string]any:
		v2, ok := v2.(map[string]any)
		if !ok || len(v1) != len(v2) {
			return false, nil
		}
		for k, val1 := range v1 {
			val2, ok := v2[k]
			if !ok {
				return false, nil
			}
			// short-circuit on first mismatch or nested error
			if ok, k := equals(val1, val2); !ok || k != nil {
				return ok, k
			}
		}
		return true, nil
	case []any:
		v2, ok := v2.([]any)
		if !ok || len(v1) != len(v2) {
			return false, nil
		}
		for i := range v1 {
			if ok, k := equals(v1[i], v2[i]); !ok || k != nil {
				return ok, k
			}
		}
		return true, nil
	case nil:
		return v2 == nil, nil
	case bool:
		v2, ok := v2.(bool)
		return ok && v1 == v2, nil
	case string:
		v2, ok := v2.(string)
		return ok && v1 == v2, nil
	case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		// normalize both numbers through big.Rat so representation
		// differences (int vs float vs json.Number) do not matter
		num1, ok1 := new(big.Rat).SetString(fmt.Sprint(v1))
		num2, ok2 := new(big.Rat).SetString(fmt.Sprint(v2))
		return ok1 && ok2 && num1.Cmp(num2) == 0, nil
	default:
		return false, &kind.InvalidJsonValue{Value: v1}
	}
}
|
||||
|
||||
// duplicates returns indexes (i, j) with i < j of the first pair of
// equal items in arr, or (-1, -1) when all items are distinct. A
// non-nil ErrorKind is returned if comparison encounters a value
// that is not a valid json value.
func duplicates(arr []any) (int, int, ErrorKind) {
	// small arrays: plain pairwise comparison beats hashing
	if len(arr) <= 20 {
		for i := 1; i < len(arr); i++ {
			for j := 0; j < i; j++ {
				if ok, k := equals(arr[i], arr[j]); ok || k != nil {
					return j, i, k
				}
			}
		}
		return -1, -1, nil
	}

	// larger arrays: bucket items by content hash and only run the
	// expensive equals within a bucket
	m := make(map[uint64][]int)
	h := new(maphash.Hash)
	for i, item := range arr {
		h.Reset()
		writeHash(item, h) // NOTE(review): hash error ignored here; invalid values surface via equals only on collision — confirm against upstream
		hash := h.Sum64()
		indexes, ok := m[hash]
		if ok {
			for _, j := range indexes {
				if ok, k := equals(item, arr[j]); ok || k != nil {
					return j, i, k
				}
			}
		}
		indexes = append(indexes, i)
		m[hash] = indexes
	}
	return -1, -1, nil
}
|
||||
|
||||
func writeHash(v any, h *maphash.Hash) ErrorKind {
|
||||
switch v := v.(type) {
|
||||
case map[string]any:
|
||||
_ = h.WriteByte(0)
|
||||
props := make([]string, 0, len(v))
|
||||
for prop := range v {
|
||||
props = append(props, prop)
|
||||
}
|
||||
slices.Sort(props)
|
||||
for _, prop := range props {
|
||||
writeHash(prop, h)
|
||||
writeHash(v[prop], h)
|
||||
}
|
||||
case []any:
|
||||
_ = h.WriteByte(1)
|
||||
for _, item := range v {
|
||||
writeHash(item, h)
|
||||
}
|
||||
case nil:
|
||||
_ = h.WriteByte(2)
|
||||
case bool:
|
||||
_ = h.WriteByte(3)
|
||||
if v {
|
||||
_ = h.WriteByte(1)
|
||||
} else {
|
||||
_ = h.WriteByte(0)
|
||||
}
|
||||
case string:
|
||||
_ = h.WriteByte(4)
|
||||
_, _ = h.WriteString(v)
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
_ = h.WriteByte(5)
|
||||
num, _ := new(big.Rat).SetString(fmt.Sprint(v))
|
||||
_, _ = h.Write(num.Num().Bytes())
|
||||
_, _ = h.Write(num.Denom().Bytes())
|
||||
default:
|
||||
return &kind.InvalidJsonValue{Value: v}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// ParseURLError reports a url that could not be parsed.
type ParseURLError struct {
	URL string // the url being parsed
	Err error  // the underlying parse error
}

// Error implements the error interface.
func (e *ParseURLError) Error() string {
	msg := fmt.Sprintf("error in parsing %q: %v", e.URL, e.Err)
	return msg
}
|
||||
|
||||
// --
|
||||
|
||||
// InvalidJsonPointerError reports a malformed json-pointer fragment.
type InvalidJsonPointerError struct {
	URL string
}

// Error implements the error interface.
func (e *InvalidJsonPointerError) Error() string {
	msg := fmt.Sprintf("invalid json-pointer %q", e.URL)
	return msg
}
|
||||
|
||||
// --
|
||||
|
||||
// JSONPointerNotFoundError reports a json-pointer that does not
// resolve within its document.
type JSONPointerNotFoundError struct {
	URL string
}

// Error implements the error interface.
func (e *JSONPointerNotFoundError) Error() string {
	msg := fmt.Sprintf("json-pointer in %q not found", e.URL)
	return msg
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemaValidationError reports a schema document that failed
// validation against its metaschema.
type SchemaValidationError struct {
	URL string // location of the offending schema
	Err error  // the metaschema validation failure
}

// Error implements the error interface.
func (e *SchemaValidationError) Error() string {
	msg := fmt.Sprintf("%q is not valid against metaschema: %v", e.URL, e.Err)
	return msg
}
|
||||
|
||||
// --
|
||||
|
||||
// LocalizableError is an error whose message is localizable.
|
||||
func LocalizableError(format string, args ...any) error {
|
||||
return &localizableError{format, args}
|
||||
}
|
||||
|
||||
type localizableError struct {
|
||||
msg string
|
||||
args []any
|
||||
}
|
||||
|
||||
func (e *localizableError) Error() string {
|
||||
return fmt.Sprintf(e.msg, e.args...)
|
||||
}
|
||||
|
||||
func (e *localizableError) LocalizedError(p *message.Printer) string {
|
||||
return p.Sprintf(e.msg, e.args...)
|
||||
}
|
||||
975
vendor/github.com/santhosh-tekuri/jsonschema/v6/validator.go
generated
vendored
Normal file
975
vendor/github.com/santhosh-tekuri/jsonschema/v6/validator.go
generated
vendored
Normal file
|
|
@ -0,0 +1,975 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"slices"
|
||||
"strconv"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
// Validate validates the decoded json document v against the
// compiled schema. It returns nil when v is valid, otherwise a
// *ValidationError describing all failures.
func (sch *Schema) Validate(v any) error {
	return sch.validate(v, nil, nil, nil, false, nil)
}
|
||||
|
||||
// validate runs full validation of v against sch.
//
// regexpEngine, when non-nil, overrides the "regex" format check.
// meta, resources, assertVocabs and vocabularies are only set when
// this schema is being used as a metaschema to validate another
// schema document. On failure the result is always a
// *ValidationError whose root kind is kind.Schema wrapping the
// individual causes.
func (sch *Schema) validate(v any, regexpEngine RegexpEngine, meta *Schema, resources map[jsonPointer]*resource, assertVocabs bool, vocabularies map[string]*Vocabulary) error {
	vd := validator{
		v:            v,
		vloc:         make([]string, 0, 8),
		sch:          sch,
		scp:          &scope{sch, "", 0, nil},
		uneval:       unevalFrom(v, sch, false),
		errors:       nil,
		boolResult:   false,
		regexpEngine: regexpEngine,
		meta:         meta,
		resources:    resources,
		assertVocabs: assertVocabs,
		vocabularies: vocabularies,
	}
	if _, err := vd.validate(); err != nil {
		verr := err.(*ValidationError)
		var causes []*ValidationError
		// flatten a Group error so the root error wraps the
		// individual causes directly
		if _, ok := verr.ErrorKind.(*kind.Group); ok {
			causes = verr.Causes
		} else {
			causes = []*ValidationError{verr}
		}
		return &ValidationError{
			SchemaURL:        sch.Location,
			InstanceLocation: nil,
			ErrorKind:        &kind.Schema{Location: sch.Location},
			Causes:           causes,
		}
	}

	return nil
}
|
||||
|
||||
// validator holds the mutable state of one validation run of a
// single instance value against a single schema.
type validator struct {
	v            any     // instance value being validated
	vloc         []string
	sch          *Schema // schema currently being applied
	scp          *scope  // current keyword scope, used for $ref cycle detection
	uneval       *uneval // properties/items not yet evaluated by any keyword
	errors       []*ValidationError
	boolResult   bool // is interested to know valid or not (but not actual error)
	regexpEngine RegexpEngine

	// meta validation
	meta         *Schema                   // set only when validating with metaschema
	resources    map[jsonPointer]*resource // resources which should be validated with their dialect
	assertVocabs bool
	vocabularies map[string]*Vocabulary
}
|
||||
|
||||
// validate applies vd.sch to vd.v. On success it returns the
// still-unevaluated properties/items (consumed by
// unevaluatedProperties/unevaluatedItems in enclosing schemas); on
// failure it returns a *ValidationError (a kind.Group when there
// are multiple causes).
func (vd *validator) validate() (*uneval, error) {
	s := vd.sch
	v := vd.v

	// boolean --
	if s.Bool != nil {
		if *s.Bool {
			return vd.uneval, nil
		} else {
			return nil, vd.error(&kind.FalseSchema{})
		}
	}

	// check cycle --
	if scp := vd.scp.checkCycle(); scp != nil {
		return nil, vd.error(&kind.RefCycle{
			URL:              s.Location,
			KeywordLocation1: vd.scp.kwLoc(),
			KeywordLocation2: scp.kwLoc(),
		})
	}

	t := typeOf(v)
	if t == invalidType {
		return nil, vd.error(&kind.InvalidJsonValue{Value: v})
	}

	// type --
	if s.Types != nil && !s.Types.IsEmpty() {
		// "integer" also accepts numbers with zero fraction (e.g. 2.0)
		matched := s.Types.contains(t) || (s.Types.contains(integerType) && t == numberType && isInteger(v))
		if !matched {
			return nil, vd.error(&kind.Type{Got: t.String(), Want: s.Types.ToStrings()})
		}
	}

	// const --
	if s.Const != nil {
		ok, k := equals(v, *s.Const)
		if k != nil {
			return nil, vd.error(k)
		} else if !ok {
			return nil, vd.error(&kind.Const{Got: v, Want: *s.Const})
		}
	}

	// enum --
	if s.Enum != nil {
		// fast pre-check: the value's type must occur among enum values
		matched := s.Enum.types.contains(typeOf(v))
		if matched {
			matched = false
			for _, item := range s.Enum.Values {
				ok, k := equals(v, item)
				if k != nil {
					return nil, vd.error(k)
				} else if ok {
					matched = true
					break
				}
			}
		}
		if !matched {
			return nil, vd.error(&kind.Enum{Got: v, Want: s.Enum.Values})
		}
	}

	// format --
	if s.Format != nil {
		var err error
		// the configured regexp engine takes over the "regex" format
		if s.Format.Name == "regex" && vd.regexpEngine != nil {
			err = vd.regexpEngine.validate(v)
		} else {
			err = s.Format.Validate(v)
		}
		if err != nil {
			return nil, vd.error(&kind.Format{Got: v, Want: s.Format.Name, Err: err})
		}
	}

	// $ref --
	if s.Ref != nil {
		err := vd.validateRef(s.Ref, "$ref")
		// before draft 2019-09, $ref short-circuits all sibling keywords
		if s.DraftVersion < 2019 {
			return vd.uneval, err
		}
		if err != nil {
			vd.addErr(err)
		}
	}

	// type specific validations --
	switch v := v.(type) {
	case map[string]any:
		vd.objValidate(v)
	case []any:
		vd.arrValidate(v)
	case string:
		vd.strValidate(v)
	case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		vd.numValidate(v)
	}

	// when only a valid/invalid answer is wanted (boolResult), skip
	// the remaining keywords once a failure has been recorded
	if len(vd.errors) == 0 || !vd.boolResult {
		if s.DraftVersion >= 2019 {
			vd.validateRefs()
		}
		vd.condValidate()

		for _, ext := range s.Extensions {
			ext.Validate(&ValidatorContext{vd}, v)
		}

		// unevaluated* must run last: it depends on what every other
		// keyword has evaluated
		if s.DraftVersion >= 2019 {
			vd.unevalValidate()
		}
	}

	switch len(vd.errors) {
	case 0:
		return vd.uneval, nil
	case 1:
		return nil, vd.errors[0]
	default:
		verr := vd.error(&kind.Group{})
		verr.Causes = vd.errors
		return nil, verr
	}
}
|
||||
|
||||
// objValidate applies the object keywords to obj, recording any
// failures in vd.errors and marking evaluated properties in
// vd.uneval. Keywords that postdate the schema's draft are skipped
// via the DraftVersion early returns.
func (vd *validator) objValidate(obj map[string]any) {
	s := vd.sch

	// minProperties --
	if s.MinProperties != nil {
		if len(obj) < *s.MinProperties {
			vd.addError(&kind.MinProperties{Got: len(obj), Want: *s.MinProperties})
		}
	}

	// maxProperties --
	if s.MaxProperties != nil {
		if len(obj) > *s.MaxProperties {
			vd.addError(&kind.MaxProperties{Got: len(obj), Want: *s.MaxProperties})
		}
	}

	// required --
	if len(s.Required) > 0 {
		if missing := vd.findMissing(obj, s.Required); missing != nil {
			vd.addError(&kind.Required{Missing: missing})
		}
	}

	// boolResult: only valid/invalid is wanted, so stop at first failure
	if vd.boolResult && len(vd.errors) > 0 {
		return
	}

	// dependencies --
	for pname, dep := range s.Dependencies {
		if _, ok := obj[pname]; ok {
			// a dependency is either a list of required property
			// names or a schema for the whole object
			switch dep := dep.(type) {
			case []string:
				if missing := vd.findMissing(obj, dep); missing != nil {
					vd.addError(&kind.Dependency{Prop: pname, Missing: missing})
				}
			case *Schema:
				vd.addErr(vd.validateSelf(dep, "", false))
			}
		}
	}

	var additionalPros []string
	for pname, pvalue := range obj {
		if vd.boolResult && len(vd.errors) > 0 {
			return
		}
		evaluated := false

		// properties --
		if sch, ok := s.Properties[pname]; ok {
			evaluated = true
			vd.addErr(vd.validateVal(sch, pvalue, pname))
		}

		// patternProperties --
		for regex, sch := range s.PatternProperties {
			if regex.MatchString(pname) {
				evaluated = true
				vd.addErr(vd.validateVal(sch, pvalue, pname))
			}
		}

		// additionalProperties applies only to properties not matched above
		if !evaluated && s.AdditionalProperties != nil {
			evaluated = true
			switch additional := s.AdditionalProperties.(type) {
			case bool:
				if !additional {
					additionalPros = append(additionalPros, pname)
				}
			case *Schema:
				vd.addErr(vd.validateVal(additional, pvalue, pname))
			}
		}

		if evaluated {
			delete(vd.uneval.props, pname)
		}
	}
	if len(additionalPros) > 0 {
		vd.addError(&kind.AdditionalProperties{Properties: additionalPros})
	}

	if s.DraftVersion == 4 {
		return
	}

	// propertyNames --
	if s.PropertyNames != nil {
		for pname := range obj {
			// each property name is validated as a standalone string
			// instance; when the subschema is itself a schema resource
			// with its own dialect, validate with that dialect's metaschema
			sch, meta, resources := s.PropertyNames, vd.meta, vd.resources
			res := vd.metaResource(sch)
			if res != nil {
				meta = res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
				sch = meta
			}
			if err := sch.validate(pname, vd.regexpEngine, meta, resources, vd.assertVocabs, vd.vocabularies); err != nil {
				verr := err.(*ValidationError)
				verr.SchemaURL = s.PropertyNames.Location
				verr.ErrorKind = &kind.PropertyNames{Property: pname}
				vd.addErr(verr)
			}
		}
	}

	if s.DraftVersion == 6 {
		return
	}

	// dependentSchemas --
	for pname, sch := range s.DependentSchemas {
		if _, ok := obj[pname]; ok {
			vd.addErr(vd.validateSelf(sch, "", false))
		}
	}

	// dependentRequired --
	for pname, reqd := range s.DependentRequired {
		if _, ok := obj[pname]; ok {
			if missing := vd.findMissing(obj, reqd); missing != nil {
				vd.addError(&kind.DependentRequired{Prop: pname, Missing: missing})
			}
		}
	}
}
|
||||
|
||||
// arrValidate applies the array keywords to arr, recording failures
// in vd.errors and marking evaluated items in vd.uneval. Pre-2020
// drafts use items/additionalItems; 2020-12 uses prefixItems/items.
func (vd *validator) arrValidate(arr []any) {
	s := vd.sch

	// minItems --
	if s.MinItems != nil {
		if len(arr) < *s.MinItems {
			vd.addError(&kind.MinItems{Got: len(arr), Want: *s.MinItems})
		}
	}

	// maxItems --
	if s.MaxItems != nil {
		if len(arr) > *s.MaxItems {
			vd.addError(&kind.MaxItems{Got: len(arr), Want: *s.MaxItems})
		}
	}

	// uniqueItems --
	if s.UniqueItems && len(arr) > 1 {
		i, j, k := duplicates(arr)
		if k != nil {
			vd.addError(k)
		} else if i != -1 {
			vd.addError(&kind.UniqueItems{Duplicates: [2]int{i, j}})
		}
	}

	if s.DraftVersion < 2020 {
		evaluated := 0

		// items --
		switch items := s.Items.(type) {
		case *Schema:
			// single schema: applies to every item
			for i, item := range arr {
				vd.addErr(vd.validateVal(items, item, strconv.Itoa(i)))
			}
			evaluated = len(arr)
		case []*Schema:
			// tuple form: each schema applies positionally
			min := minInt(len(arr), len(items))
			for i, item := range arr[:min] {
				vd.addErr(vd.validateVal(items[i], item, strconv.Itoa(i)))
			}
			evaluated = min
		}

		// additionalItems --
		if s.AdditionalItems != nil {
			switch additional := s.AdditionalItems.(type) {
			case bool:
				if !additional && evaluated != len(arr) {
					vd.addError(&kind.AdditionalItems{Count: len(arr) - evaluated})
				}
			case *Schema:
				// NOTE(review): i restarts at 0 for items past `evaluated`,
				// so the reported instance location may be off by
				// `evaluated` — confirm against upstream.
				for i, item := range arr[evaluated:] {
					vd.addErr(vd.validateVal(additional, item, strconv.Itoa(i)))
				}
			}
		}
	} else {
		evaluated := minInt(len(s.PrefixItems), len(arr))

		// prefixItems --
		for i, item := range arr[:evaluated] {
			vd.addErr(vd.validateVal(s.PrefixItems[i], item, strconv.Itoa(i)))
		}

		// items2020 --
		if s.Items2020 != nil {
			// NOTE(review): as above, i restarts at 0 here.
			for i, item := range arr[evaluated:] {
				vd.addErr(vd.validateVal(s.Items2020, item, strconv.Itoa(i)))
			}
		}
	}

	// contains --
	if s.Contains != nil {
		var errors []*ValidationError
		var matched []int

		for i, item := range arr {
			if err := vd.validateVal(s.Contains, item, strconv.Itoa(i)); err != nil {
				errors = append(errors, err.(*ValidationError))
			} else {
				matched = append(matched, i)
				// since 2020-12, contains marks matched items evaluated
				if s.DraftVersion >= 2020 {
					delete(vd.uneval.items, i)
				}
			}
		}

		// minContains --
		if s.MinContains != nil {
			if len(matched) < *s.MinContains {
				vd.addErrors(errors, &kind.MinContains{Got: matched, Want: *s.MinContains})
			}
		} else if len(matched) == 0 {
			// without minContains, at least one item must match
			vd.addErrors(errors, &kind.Contains{})
		}

		// maxContains --
		if s.MaxContains != nil {
			if len(matched) > *s.MaxContains {
				vd.addError(&kind.MaxContains{Got: matched, Want: *s.MaxContains})
			}
		}
	}
}
|
||||
|
||||
func (vd *validator) strValidate(str string) {
|
||||
s := vd.sch
|
||||
|
||||
strLen := -1
|
||||
if s.MinLength != nil || s.MaxLength != nil {
|
||||
strLen = utf8.RuneCount([]byte(str))
|
||||
}
|
||||
|
||||
// minLength --
|
||||
if s.MinLength != nil {
|
||||
if strLen < *s.MinLength {
|
||||
vd.addError(&kind.MinLength{Got: strLen, Want: *s.MinLength})
|
||||
}
|
||||
}
|
||||
|
||||
// maxLength --
|
||||
if s.MaxLength != nil {
|
||||
if strLen > *s.MaxLength {
|
||||
vd.addError(&kind.MaxLength{Got: strLen, Want: *s.MaxLength})
|
||||
}
|
||||
}
|
||||
|
||||
// pattern --
|
||||
if s.Pattern != nil {
|
||||
if !s.Pattern.MatchString(str) {
|
||||
vd.addError(&kind.Pattern{Got: str, Want: s.Pattern.String()})
|
||||
}
|
||||
}
|
||||
|
||||
if s.DraftVersion == 6 {
|
||||
return
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
// contentEncoding --
|
||||
decoded := []byte(str)
|
||||
if s.ContentEncoding != nil {
|
||||
decoded, err = s.ContentEncoding.Decode(str)
|
||||
if err != nil {
|
||||
decoded = nil
|
||||
vd.addError(&kind.ContentEncoding{Want: s.ContentEncoding.Name, Err: err})
|
||||
}
|
||||
}
|
||||
|
||||
var deserialized *any
|
||||
if decoded != nil && s.ContentMediaType != nil {
|
||||
if s.ContentSchema == nil {
|
||||
err = s.ContentMediaType.Validate(decoded)
|
||||
} else {
|
||||
var value any
|
||||
value, err = s.ContentMediaType.UnmarshalJSON(decoded)
|
||||
if err == nil {
|
||||
deserialized = &value
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
vd.addError(&kind.ContentMediaType{
|
||||
Got: decoded,
|
||||
Want: s.ContentMediaType.Name,
|
||||
Err: err,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if deserialized != nil && s.ContentSchema != nil {
|
||||
sch, meta, resources := s.ContentSchema, vd.meta, vd.resources
|
||||
res := vd.metaResource(sch)
|
||||
if res != nil {
|
||||
meta = res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
|
||||
sch = meta
|
||||
}
|
||||
if err = sch.validate(*deserialized, vd.regexpEngine, meta, resources, vd.assertVocabs, vd.vocabularies); err != nil {
|
||||
verr := err.(*ValidationError)
|
||||
verr.SchemaURL = s.Location
|
||||
verr.ErrorKind = &kind.ContentSchema{}
|
||||
vd.addErr(verr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// numValidate applies the numeric keywords to v. The value is
// converted to a big.Rat lazily — only when some numeric keyword is
// present — and at most once, via the num closure.
func (vd *validator) numValidate(v any) {
	s := vd.sch

	var numVal *big.Rat
	num := func() *big.Rat {
		if numVal == nil {
			numVal, _ = new(big.Rat).SetString(fmt.Sprintf("%v", v))
		}
		return numVal
	}

	// minimum --
	if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
		vd.addError(&kind.Minimum{Got: num(), Want: s.Minimum})
	}

	// maximum --
	if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
		vd.addError(&kind.Maximum{Got: num(), Want: s.Maximum})
	}

	// exclusiveMinimum
	if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
		vd.addError(&kind.ExclusiveMinimum{Got: num(), Want: s.ExclusiveMinimum})
	}

	// exclusiveMaximum
	if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
		vd.addError(&kind.ExclusiveMaximum{Got: num(), Want: s.ExclusiveMaximum})
	}

	// multipleOf
	if s.MultipleOf != nil {
		// exact rational division: no float rounding issues
		if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
			vd.addError(&kind.MultipleOf{Got: num(), Want: s.MultipleOf})
		}
	}
}
|
||||
|
||||
// condValidate applies the applicator keywords not, allOf, anyOf,
// oneOf, and the if/then/else conditional of the current schema.
func (vd *validator) condValidate() {
	s := vd.sch

	// not --
	if s.Not != nil {
		// boolResult=true: only pass/fail is needed, not error details.
		if vd.validateSelf(s.Not, "", true) == nil {
			vd.addError(&kind.Not{})
		}
	}

	// allOf --
	if len(s.AllOf) > 0 {
		var errors []*ValidationError
		for _, sch := range s.AllOf {
			if err := vd.validateSelf(sch, "", false); err != nil {
				errors = append(errors, err.(*ValidationError))
				if vd.boolResult {
					// caller only wants a verdict; first failure suffices.
					break
				}
			}
		}
		if len(errors) != 0 {
			vd.addErrors(errors, &kind.AllOf{})
		}
	}

	// anyOf
	if len(s.AnyOf) > 0 {
		var matched bool
		var errors []*ValidationError
		for _, sch := range s.AnyOf {
			if err := vd.validateSelf(sch, "", false); err != nil {
				errors = append(errors, err.(*ValidationError))
			} else {
				matched = true
				// for uneval, all schemas must be evaluated
				if vd.uneval.isEmpty() {
					break
				}
			}
		}
		if !matched {
			vd.addErrors(errors, &kind.AnyOf{})
		}
	}

	// oneOf
	if len(s.OneOf) > 0 {
		var matched = -1
		var errors []*ValidationError
		for i, sch := range s.OneOf {
			// after the first match, remaining subschemas run in
			// bool-only mode: we only need to detect a second match.
			if err := vd.validateSelf(sch, "", matched != -1); err != nil {
				if matched == -1 {
					errors = append(errors, err.(*ValidationError))
				}
			} else {
				if matched == -1 {
					matched = i
				} else {
					// a second subschema matched: oneOf is violated.
					vd.addError(&kind.OneOf{Subschemas: []int{matched, i}})
					break
				}
			}
		}
		if matched == -1 {
			// no subschema matched; report all collected failures.
			vd.addErrors(errors, &kind.OneOf{Subschemas: nil})
		}
	}

	// if, then, else --
	if s.If != nil {
		if vd.validateSelf(s.If, "", true) == nil {
			if s.Then != nil {
				vd.addErr(vd.validateSelf(s.Then, "", false))
			}
		} else if s.Else != nil {
			vd.addErr(vd.validateSelf(s.Else, "", false))
		}
	}
}
|
||||
|
||||
func (vd *validator) unevalValidate() {
|
||||
s := vd.sch
|
||||
|
||||
// unevaluatedProperties
|
||||
if obj, ok := vd.v.(map[string]any); ok && s.UnevaluatedProperties != nil {
|
||||
for pname := range vd.uneval.props {
|
||||
if pvalue, ok := obj[pname]; ok {
|
||||
vd.addErr(vd.validateVal(s.UnevaluatedProperties, pvalue, pname))
|
||||
}
|
||||
}
|
||||
vd.uneval.props = nil
|
||||
}
|
||||
|
||||
// unevaluatedItems
|
||||
if arr, ok := vd.v.([]any); ok && s.UnevaluatedItems != nil {
|
||||
for i := range vd.uneval.items {
|
||||
vd.addErr(vd.validateVal(s.UnevaluatedItems, arr[i], strconv.Itoa(i)))
|
||||
}
|
||||
vd.uneval.items = nil
|
||||
}
|
||||
}
|
||||
|
||||
// validation helpers --
|
||||
|
||||
// validateSelf validates the current value vd.v against sch.
// refKw names the referencing keyword ("" when sch is reached without
// a jump); boolResult requests pass/fail only, suppressing details.
func (vd *validator) validateSelf(sch *Schema, refKw string, boolResult bool) error {
	// same value is being validated, so the child scope keeps vd.scp.vid.
	scp := vd.scp.child(sch, refKw, vd.scp.vid)
	// track unevaluated props/items if sch or the caller still needs them.
	uneval := unevalFrom(vd.v, sch, !vd.uneval.isEmpty())
	subvd := validator{
		v:            vd.v,
		vloc:         vd.vloc,
		sch:          sch,
		scp:          scp,
		uneval:       uneval,
		errors:       nil,
		boolResult:   vd.boolResult || boolResult,
		regexpEngine: vd.regexpEngine,
		meta:         vd.meta,
		resources:    vd.resources,
		assertVocabs: vd.assertVocabs,
		vocabularies: vd.vocabularies,
	}
	subvd.handleMeta()
	uneval, err := subvd.validate()
	if err == nil {
		// propagate what the subschema evaluated back to the caller.
		vd.uneval.merge(uneval)
	}
	return err
}
|
||||
|
||||
// validateVal validates child value v, located at token vtok under the
// current instance location, against sch.
func (vd *validator) validateVal(sch *Schema, v any, vtok string) error {
	// NOTE(review): append may share vd.vloc's backing array between
	// sibling calls; errors copy the location via instanceLocation, but
	// confirm no retained slice can be clobbered — TODO verify upstream.
	vloc := append(vd.vloc, vtok)
	// new value => new vid, so scope cycle detection restarts here.
	scp := vd.scp.child(sch, "", vd.scp.vid+1)
	uneval := unevalFrom(v, sch, false)
	subvd := validator{
		v:            v,
		vloc:         vloc,
		sch:          sch,
		scp:          scp,
		uneval:       uneval,
		errors:       nil,
		boolResult:   vd.boolResult,
		regexpEngine: vd.regexpEngine,
		meta:         vd.meta,
		resources:    vd.resources,
		assertVocabs: vd.assertVocabs,
		vocabularies: vd.vocabularies,
	}
	subvd.handleMeta()
	_, err := subvd.validate()
	return err
}
|
||||
|
||||
// validateValue validates child value v, located at the multi-token
// path vpath under the current instance location, against sch.
func (vd *validator) validateValue(sch *Schema, v any, vpath []string) error {
	vloc := append(vd.vloc, vpath...)
	// new value => new vid, so scope cycle detection restarts here.
	scp := vd.scp.child(sch, "", vd.scp.vid+1)
	uneval := unevalFrom(v, sch, false)
	subvd := validator{
		v:            v,
		vloc:         vloc,
		sch:          sch,
		scp:          scp,
		uneval:       uneval,
		errors:       nil,
		boolResult:   vd.boolResult,
		regexpEngine: vd.regexpEngine,
		meta:         vd.meta,
		resources:    vd.resources,
		assertVocabs: vd.assertVocabs,
		vocabularies: vd.vocabularies,
	}
	subvd.handleMeta()
	_, err := subvd.validate()
	return err
}
|
||||
|
||||
func (vd *validator) metaResource(sch *Schema) *resource {
|
||||
if sch != vd.meta {
|
||||
return nil
|
||||
}
|
||||
ptr := ""
|
||||
for _, tok := range vd.instanceLocation() {
|
||||
ptr += "/"
|
||||
ptr += escape(tok)
|
||||
}
|
||||
return vd.resources[jsonPointer(ptr)]
|
||||
}
|
||||
|
||||
func (vd *validator) handleMeta() {
|
||||
res := vd.metaResource(vd.sch)
|
||||
if res == nil {
|
||||
return
|
||||
}
|
||||
sch := res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
|
||||
vd.meta = sch
|
||||
vd.sch = sch
|
||||
}
|
||||
|
||||
// reference validation --
|
||||
|
||||
func (vd *validator) validateRef(sch *Schema, kw string) error {
|
||||
err := vd.validateSelf(sch, kw, false)
|
||||
if err != nil {
|
||||
refErr := vd.error(&kind.Reference{Keyword: kw, URL: sch.Location})
|
||||
verr := err.(*ValidationError)
|
||||
if _, ok := verr.ErrorKind.(*kind.Group); ok {
|
||||
refErr.Causes = verr.Causes
|
||||
} else {
|
||||
refErr.Causes = append(refErr.Causes, verr)
|
||||
}
|
||||
return refErr
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (vd *validator) resolveRecursiveAnchor(fallback *Schema) *Schema {
|
||||
sch := fallback
|
||||
scp := vd.scp
|
||||
for scp != nil {
|
||||
if scp.sch.resource.RecursiveAnchor {
|
||||
sch = scp.sch
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (vd *validator) resolveDynamicAnchor(name string, fallback *Schema) *Schema {
|
||||
sch := fallback
|
||||
scp := vd.scp
|
||||
for scp != nil {
|
||||
if dsch, ok := scp.sch.resource.dynamicAnchors[name]; ok {
|
||||
sch = dsch
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
// validateRefs applies the dynamic reference keywords $recursiveRef
// (2019-09) and $dynamicRef (2020-12) of the current schema.
func (vd *validator) validateRefs() {
	// $recursiveRef --
	if sch := vd.sch.RecursiveRef; sch != nil {
		if sch.RecursiveAnchor {
			// re-resolve against the outermost $recursiveAnchor in scope.
			sch = vd.resolveRecursiveAnchor(sch)
		}
		vd.addErr(vd.validateRef(sch, "$recursiveRef"))
	}

	// $dynamicRef --
	if dref := vd.sch.DynamicRef; dref != nil {
		sch := dref.Ref // initial target
		if dref.Anchor != "" {
			// $dynamicRef includes anchor
			if sch.DynamicAnchor == dref.Anchor {
				// initial target has matching $dynamicAnchor
				sch = vd.resolveDynamicAnchor(dref.Anchor, sch)
			}
		}
		vd.addErr(vd.validateRef(sch, "$dynamicRef"))
	}
}
|
||||
|
||||
// error helpers --
|
||||
|
||||
// instanceLocation returns a copy of the current instance location,
// safe for the caller to retain (slices.Clone preserves nil-ness).
func (vd *validator) instanceLocation() []string {
	return slices.Clone(vd.vloc)
}
|
||||
|
||||
func (vd *validator) error(kind ErrorKind) *ValidationError {
|
||||
if vd.boolResult {
|
||||
return &ValidationError{}
|
||||
}
|
||||
return &ValidationError{
|
||||
SchemaURL: vd.sch.Location,
|
||||
InstanceLocation: vd.instanceLocation(),
|
||||
ErrorKind: kind,
|
||||
Causes: nil,
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) addErr(err error) {
|
||||
if err != nil {
|
||||
vd.errors = append(vd.errors, err.(*ValidationError))
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) addError(kind ErrorKind) {
|
||||
vd.errors = append(vd.errors, vd.error(kind))
|
||||
}
|
||||
|
||||
func (vd *validator) addErrors(errors []*ValidationError, kind ErrorKind) {
|
||||
err := vd.error(kind)
|
||||
err.Causes = errors
|
||||
vd.errors = append(vd.errors, err)
|
||||
}
|
||||
|
||||
func (vd *validator) findMissing(obj map[string]any, reqd []string) []string {
|
||||
var missing []string
|
||||
for _, pname := range reqd {
|
||||
if _, ok := obj[pname]; !ok {
|
||||
if vd.boolResult {
|
||||
return []string{} // non-nil
|
||||
}
|
||||
missing = append(missing, pname)
|
||||
}
|
||||
}
|
||||
return missing
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// scope tracks the chain of schemas applied while validating, used for
// cycle detection (vid) and keyword-location reporting (kwLoc).
type scope struct {
	sch *Schema

	// if empty, compute from self.sch and self.parent.sch.
	// not empty, only when there is a jump i.e. $ref, $XXXRef.
	refKeyword string

	// unique id of value being validated.
	// if two scopes validate same value, they will have
	// same vid.
	vid int

	parent *scope
}
|
||||
|
||||
func (sc *scope) child(sch *Schema, refKeyword string, vid int) *scope {
|
||||
return &scope{sch, refKeyword, vid, sc}
|
||||
}
|
||||
|
||||
func (sc *scope) checkCycle() *scope {
|
||||
scp := sc.parent
|
||||
for scp != nil {
|
||||
if scp.vid != sc.vid {
|
||||
break
|
||||
}
|
||||
if scp.sch == sc.sch {
|
||||
return scp
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sc *scope) kwLoc() string {
|
||||
var loc string
|
||||
for sc.parent != nil {
|
||||
if sc.refKeyword != "" {
|
||||
loc = fmt.Sprintf("/%s%s", escape(sc.refKeyword), loc)
|
||||
} else {
|
||||
cur := sc.sch.Location
|
||||
parent := sc.parent.sch.Location
|
||||
loc = fmt.Sprintf("%s%s", cur[len(parent):], loc)
|
||||
}
|
||||
sc = sc.parent
|
||||
}
|
||||
return loc
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// uneval tracks the properties/items of the current value that no
// applied keyword has evaluated yet.
type uneval struct {
	props map[string]struct{} // remaining property names (objects)
	items map[int]struct{}    // remaining indexes (arrays)
}
|
||||
|
||||
// unevalFrom seeds the unevaluated property/item sets for value v being
// validated against sch. callerNeeds forces tracking even when sch
// itself has no unevaluatedProperties/unevaluatedItems keyword.
func unevalFrom(v any, sch *Schema, callerNeeds bool) *uneval {
	uneval := &uneval{}
	switch v := v.(type) {
	case map[string]any:
		// allPropsEvaluated: compile-time analysis proved every
		// property is evaluated, so tracking would be wasted work.
		if !sch.allPropsEvaluated && (callerNeeds || sch.UnevaluatedProperties != nil) {
			uneval.props = map[string]struct{}{}
			for k := range v {
				uneval.props[k] = struct{}{}
			}
		}
	case []any:
		// only items at index >= numItemsEvaluated can be unevaluated.
		if !sch.allItemsEvaluated && (callerNeeds || sch.UnevaluatedItems != nil) && sch.numItemsEvaluated < len(v) {
			uneval.items = map[int]struct{}{}
			for i := sch.numItemsEvaluated; i < len(v); i++ {
				uneval.items[i] = struct{}{}
			}
		}
	}
	return uneval
}
|
||||
|
||||
func (ue *uneval) merge(other *uneval) {
|
||||
for k := range ue.props {
|
||||
if _, ok := other.props[k]; !ok {
|
||||
delete(ue.props, k)
|
||||
}
|
||||
}
|
||||
for i := range ue.items {
|
||||
if _, ok := other.items[i]; !ok {
|
||||
delete(ue.items, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (ue *uneval) isEmpty() bool {
|
||||
return len(ue.props) == 0 && len(ue.items) == 0
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// ValidationError describes why an instance failed validation against
// a schema, possibly with nested causes.
type ValidationError struct {
	// absolute, dereferenced schema location.
	SchemaURL string

	// location of the JSON value within the instance being validated.
	InstanceLocation []string

	// kind of error.
	ErrorKind ErrorKind

	// holds nested errors.
	Causes []*ValidationError
}
|
||||
|
||||
// ErrorKind identifies which keyword (or condition) failed and can
// render a localized message for it.
type ErrorKind interface {
	// KeywordPath returns the failing keyword as a path of tokens.
	KeywordPath() []string
	// LocalizedString renders the error message with the given printer.
	LocalizedString(*message.Printer) string
}
|
||||
106
vendor/github.com/santhosh-tekuri/jsonschema/v6/vocab.go
generated
vendored
Normal file
106
vendor/github.com/santhosh-tekuri/jsonschema/v6/vocab.go
generated
vendored
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
package jsonschema
|
||||
|
||||
// CompilerContext provides helpers for
// compiling a [Vocabulary].
type CompilerContext struct {
	c *objCompiler // compiler for the schema object being compiled
}
|
||||
|
||||
func (ctx *CompilerContext) Enqueue(schPath []string) *Schema {
|
||||
ptr := ctx.c.up.ptr
|
||||
for _, tok := range schPath {
|
||||
ptr = ptr.append(tok)
|
||||
}
|
||||
return ctx.c.enqueuePtr(ptr)
|
||||
}
|
||||
|
||||
// Vocabulary defines a set of keywords, their syntax and
// their semantics.
type Vocabulary struct {
	// URL identifier for this Vocabulary.
	URL string

	// Schema that is used to validate the keywords that are introduced by this
	// vocabulary.
	Schema *Schema

	// Subschemas lists the possible locations of subschemas introduced by
	// this vocabulary.
	Subschemas []SchemaPath

	// Compile compiles the keywords (introduced by this vocabulary) in obj into [SchemaExt].
	// If obj does not contain any keywords introduced by this vocabulary, nil SchemaExt must
	// be returned.
	Compile func(ctx *CompilerContext, obj map[string]any) (SchemaExt, error)
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemaExt is the compiled form of a vocabulary.
type SchemaExt interface {
	// Validate validates v against this extension; any errors are
	// reported to ctx.
	Validate(ctx *ValidatorContext, v any)
}
|
||||
|
||||
// ValidatorContext provides helpers for
// validating with [SchemaExt].
type ValidatorContext struct {
	vd *validator // validator for the current schema/value pair
}
|
||||
|
||||
// Validate validates v with sch. vpath gives path of v from current context value.
|
||||
func (ctx *ValidatorContext) Validate(sch *Schema, v any, vpath []string) error {
|
||||
switch len(vpath) {
|
||||
case 0:
|
||||
return ctx.vd.validateSelf(sch, "", false)
|
||||
case 1:
|
||||
return ctx.vd.validateVal(sch, v, vpath[0])
|
||||
default:
|
||||
return ctx.vd.validateValue(sch, v, vpath)
|
||||
}
|
||||
}
|
||||
|
||||
// EvaluatedProp marks given property of current object as evaluated.
|
||||
func (ctx *ValidatorContext) EvaluatedProp(pname string) {
|
||||
delete(ctx.vd.uneval.props, pname)
|
||||
}
|
||||
|
||||
// EvaluatedItem marks items at given index of current array as evaluated.
|
||||
func (ctx *ValidatorContext) EvaluatedItem(index int) {
|
||||
delete(ctx.vd.uneval.items, index)
|
||||
}
|
||||
|
||||
// AddError reports validation-error of given kind.
|
||||
func (ctx *ValidatorContext) AddError(k ErrorKind) {
|
||||
ctx.vd.addError(k)
|
||||
}
|
||||
|
||||
// AddErrors reports validation-errors of given kind.
|
||||
func (ctx *ValidatorContext) AddErrors(errors []*ValidationError, k ErrorKind) {
|
||||
ctx.vd.addErrors(errors, k)
|
||||
}
|
||||
|
||||
// AddErr reports the given err. This is typically used to report
|
||||
// the error created by subschema validation.
|
||||
//
|
||||
// NOTE that err must be of type *ValidationError.
|
||||
func (ctx *ValidatorContext) AddErr(err error) {
|
||||
ctx.vd.addErr(err)
|
||||
}
|
||||
|
||||
func (ctx *ValidatorContext) Equals(v1, v2 any) (bool, error) {
|
||||
b, k := equals(v1, v2)
|
||||
if k != nil {
|
||||
return false, ctx.vd.error(k)
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
|
||||
func (ctx *ValidatorContext) Duplicates(arr []any) (int, int, error) {
|
||||
i, j, k := duplicates(arr)
|
||||
if k != nil {
|
||||
return -1, -1, ctx.vd.error(k)
|
||||
}
|
||||
return i, j, nil
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue