mirror of
https://github.com/yannh/kubeconform.git
synced 2026-02-11 05:59:22 +00:00
Update jsonschema library to v6 (#324)
This commit is contained in:
parent
df26febc54
commit
31e9679c96
125 changed files with 35572 additions and 5135 deletions
|
|
@ -1,5 +1,5 @@
|
|||
FROM bats/bats:1.11.0
|
||||
RUN apk --no-cache add ca-certificates parallel libxml2-utils
|
||||
COPY dist/kubeconform_linux_amd64_v1/kubeconform /code/bin/
|
||||
COPY bin/kubeconform /code/bin/
|
||||
COPY acceptance.bats acceptance-nonetwork.bats /code/
|
||||
COPY fixtures /code/fixtures
|
||||
|
|
|
|||
|
|
@ -299,14 +299,14 @@ resetCacheFolder() {
|
|||
@test "Fail when parsing a List that contains an invalid resource" {
|
||||
run bin/kubeconform -summary fixtures/list_invalid.yaml
|
||||
[ "$status" -eq 1 ]
|
||||
[ "${lines[0]}" == 'fixtures/list_invalid.yaml - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema: '\''/spec/replicas'\'' does not validate with https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#/properties/spec/properties/replicas/type: expected integer or null, but got string' ]
|
||||
[ "${lines[0]}" == 'fixtures/list_invalid.yaml - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema validation failed with '\''https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#'\'' - at '\''/spec/replicas'\'': got string, want null or integer' ]
|
||||
[ "${lines[1]}" == 'Summary: 2 resources found in 1 file - Valid: 1, Invalid: 1, Errors: 0, Skipped: 0' ]
|
||||
}
|
||||
|
||||
@test "Fail when parsing a List that contains an invalid resource from stdin" {
|
||||
run bash -c "cat fixtures/list_invalid.yaml | bin/kubeconform -summary -"
|
||||
[ "$status" -eq 1 ]
|
||||
[ "${lines[0]}" == 'stdin - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema: '\''/spec/replicas'\'' does not validate with https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#/properties/spec/properties/replicas/type: expected integer or null, but got string' ]
|
||||
[ "${lines[0]}" == 'stdin - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema validation failed with '\''https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#'\'' - at '\''/spec/replicas'\'': got string, want null or integer' ]
|
||||
[ "${lines[1]}" == 'Summary: 2 resources found parsing stdin - Valid: 1, Invalid: 1, Errors: 0, Skipped: 0' ]
|
||||
}
|
||||
|
||||
|
|
|
|||
10655
fixtures/cache/6dd0c06492c957fe2118a16dae1c5b9e76be072b527a9a45fd1044bd54237334
vendored
Normal file
10655
fixtures/cache/6dd0c06492c957fe2118a16dae1c5b9e76be072b527a9a45fd1044bd54237334
vendored
Normal file
File diff suppressed because it is too large
Load diff
3
go.mod
3
go.mod
|
|
@ -4,7 +4,8 @@ go 1.24
|
|||
|
||||
require (
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1
|
||||
golang.org/x/text v0.25.0
|
||||
sigs.k8s.io/yaml v1.4.0
|
||||
)
|
||||
|
||||
|
|
|
|||
8
go.sum
8
go.sum
|
|
@ -1,3 +1,5 @@
|
|||
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
|
||||
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
|
||||
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
|
|
@ -12,10 +14,12 @@ github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxec
|
|||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4=
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY=
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw=
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU=
|
||||
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
|
||||
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
|
||||
|
|
|
|||
4
pkg/cache/cache.go
vendored
4
pkg/cache/cache.go
vendored
|
|
@ -1,6 +1,6 @@
|
|||
package cache
|
||||
|
||||
type Cache interface {
|
||||
Get(resourceKind, resourceAPIVersion, k8sVersion string) (interface{}, error)
|
||||
Set(resourceKind, resourceAPIVersion, k8sVersion string, schema interface{}) error
|
||||
Get(key string) (any, error)
|
||||
Set(key string, schema any) error
|
||||
}
|
||||
|
|
|
|||
18
pkg/cache/inmemory.go
vendored
18
pkg/cache/inmemory.go
vendored
|
|
@ -10,26 +10,21 @@ import (
|
|||
// - This cache caches the parsed Schemas
|
||||
type inMemory struct {
|
||||
sync.RWMutex
|
||||
schemas map[string]interface{}
|
||||
schemas map[string]any
|
||||
}
|
||||
|
||||
// New creates a new cache for downloaded schemas
|
||||
func NewInMemoryCache() Cache {
|
||||
return &inMemory{
|
||||
schemas: map[string]interface{}{},
|
||||
schemas: make(map[string]any),
|
||||
}
|
||||
}
|
||||
|
||||
func key(resourceKind, resourceAPIVersion, k8sVersion string) string {
|
||||
return fmt.Sprintf("%s-%s-%s", resourceKind, resourceAPIVersion, k8sVersion)
|
||||
}
|
||||
|
||||
// Get retrieves the JSON schema given a resource signature
|
||||
func (c *inMemory) Get(resourceKind, resourceAPIVersion, k8sVersion string) (interface{}, error) {
|
||||
k := key(resourceKind, resourceAPIVersion, k8sVersion)
|
||||
func (c *inMemory) Get(key string) (any, error) {
|
||||
c.RLock()
|
||||
defer c.RUnlock()
|
||||
schema, ok := c.schemas[k]
|
||||
schema, ok := c.schemas[key]
|
||||
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("schema not found in in-memory cache")
|
||||
|
|
@ -39,11 +34,10 @@ func (c *inMemory) Get(resourceKind, resourceAPIVersion, k8sVersion string) (int
|
|||
}
|
||||
|
||||
// Set adds a JSON schema to the schema cache
|
||||
func (c *inMemory) Set(resourceKind, resourceAPIVersion, k8sVersion string, schema interface{}) error {
|
||||
k := key(resourceKind, resourceAPIVersion, k8sVersion)
|
||||
func (c *inMemory) Set(key string, schema any) error {
|
||||
c.Lock()
|
||||
defer c.Unlock()
|
||||
c.schemas[k] = schema
|
||||
c.schemas[key] = schema
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
17
pkg/cache/ondisk.go
vendored
17
pkg/cache/ondisk.go
vendored
|
|
@ -3,7 +3,6 @@ package cache
|
|||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
|
|
@ -22,17 +21,17 @@ func NewOnDiskCache(cache string) Cache {
|
|||
}
|
||||
}
|
||||
|
||||
func cachePath(folder, resourceKind, resourceAPIVersion, k8sVersion string) string {
|
||||
hash := sha256.Sum256([]byte(fmt.Sprintf("%s-%s-%s", resourceKind, resourceAPIVersion, k8sVersion)))
|
||||
func cachePath(folder, key string) string {
|
||||
hash := sha256.Sum256([]byte(key))
|
||||
return path.Join(folder, hex.EncodeToString(hash[:]))
|
||||
}
|
||||
|
||||
// Get retrieves the JSON schema given a resource signature
|
||||
func (c *onDisk) Get(resourceKind, resourceAPIVersion, k8sVersion string) (interface{}, error) {
|
||||
func (c *onDisk) Get(key string) (any, error) {
|
||||
c.RLock()
|
||||
defer c.RUnlock()
|
||||
|
||||
f, err := os.Open(cachePath(c.folder, resourceKind, resourceAPIVersion, k8sVersion))
|
||||
f, err := os.Open(cachePath(c.folder, key))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -42,8 +41,12 @@ func (c *onDisk) Get(resourceKind, resourceAPIVersion, k8sVersion string) (inter
|
|||
}
|
||||
|
||||
// Set adds a JSON schema to the schema cache
|
||||
func (c *onDisk) Set(resourceKind, resourceAPIVersion, k8sVersion string, schema interface{}) error {
|
||||
func (c *onDisk) Set(key string, schema any) error {
|
||||
c.Lock()
|
||||
defer c.Unlock()
|
||||
return os.WriteFile(cachePath(c.folder, resourceKind, resourceAPIVersion, k8sVersion), schema.([]byte), 0644)
|
||||
|
||||
if _, err := os.Stat(cachePath(c.folder, key)); os.IsNotExist(err) {
|
||||
return os.WriteFile(cachePath(c.folder, key), schema.([]byte), 0644)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
78
pkg/loader/file.go
Normal file
78
pkg/loader/file.go
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"io"
|
||||
gourl "net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// FileLoader loads json file url.
|
||||
type FileLoader struct {
|
||||
cache cache.Cache
|
||||
}
|
||||
|
||||
func (l FileLoader) Load(url string) (any, error) {
|
||||
path, err := l.ToFile(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if l.cache != nil {
|
||||
if cached, err := l.cache.Get(path); err == nil {
|
||||
return jsonschema.UnmarshalJSON(bytes.NewReader(cached.([]byte)))
|
||||
}
|
||||
}
|
||||
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
msg := fmt.Sprintf("could not open file %s", path)
|
||||
return nil, NewNotFoundError(errors.New(msg))
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
content, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if l.cache != nil {
|
||||
if err = l.cache.Set(path, content); err != nil {
|
||||
return nil, fmt.Errorf("failed to write cache to disk: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
return jsonschema.UnmarshalJSON(bytes.NewReader(content))
|
||||
}
|
||||
|
||||
// ToFile is helper method to convert file url to file path.
|
||||
func (l FileLoader) ToFile(url string) (string, error) {
|
||||
u, err := gourl.Parse(url)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if u.Scheme != "file" {
|
||||
return url, nil
|
||||
}
|
||||
path := u.Path
|
||||
if runtime.GOOS == "windows" {
|
||||
path = strings.TrimPrefix(path, "/")
|
||||
path = filepath.FromSlash(path)
|
||||
}
|
||||
return path, nil
|
||||
}
|
||||
|
||||
func NewFileLoader(cache cache.Cache) *FileLoader {
|
||||
return &FileLoader{
|
||||
cache: cache,
|
||||
}
|
||||
}
|
||||
85
pkg/loader/http.go
Normal file
85
pkg/loader/http.go
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/hashicorp/go-retryablehttp"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
type HTTPURLLoader struct {
|
||||
client http.Client
|
||||
cache cache.Cache
|
||||
}
|
||||
|
||||
func (l *HTTPURLLoader) Load(url string) (any, error) {
|
||||
if l.cache != nil {
|
||||
if cached, err := l.cache.Get(url); err == nil {
|
||||
return jsonschema.UnmarshalJSON(bytes.NewReader(cached.([]byte)))
|
||||
}
|
||||
}
|
||||
|
||||
resp, err := l.client.Get(url)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed downloading schema at %s: %s", url, err)
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNotFound {
|
||||
msg := fmt.Sprintf("could not find schema at %s", url)
|
||||
return nil, NewNotFoundError(errors.New(msg))
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
msg := fmt.Sprintf("error while downloading schema at %s - received HTTP status %d", url, resp.StatusCode)
|
||||
return nil, fmt.Errorf("%s", msg)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed parsing schema from %s: %s", url, err)
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
|
||||
if l.cache != nil {
|
||||
if err = l.cache.Set(url, body); err != nil {
|
||||
return nil, fmt.Errorf("failed to write cache to disk: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(body))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return s, nil
|
||||
}
|
||||
|
||||
func NewHTTPURLLoader(skipTLS bool, cache cache.Cache) (*HTTPURLLoader, error) {
|
||||
transport := &http.Transport{
|
||||
MaxIdleConns: 100,
|
||||
IdleConnTimeout: 3 * time.Second,
|
||||
DisableCompression: true,
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
}
|
||||
|
||||
if skipTLS {
|
||||
transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
|
||||
}
|
||||
|
||||
// retriable http client
|
||||
retryClient := retryablehttp.NewClient()
|
||||
retryClient.RetryMax = 2
|
||||
retryClient.HTTPClient = &http.Client{Transport: transport}
|
||||
retryClient.Logger = nil
|
||||
|
||||
httpLoader := HTTPURLLoader{client: *retryClient.StandardClient(), cache: cache}
|
||||
return &httpLoader, nil
|
||||
}
|
||||
12
pkg/loader/loaders.go
Normal file
12
pkg/loader/loaders.go
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
package loader
|
||||
|
||||
// NotFoundError is returned when the registry does not contain a schema for the resource
|
||||
type NotFoundError struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func NewNotFoundError(err error) *NotFoundError {
|
||||
return &NotFoundError{err}
|
||||
}
|
||||
func (e *NotFoundError) Error() string { return e.err.Error() }
|
||||
func (e *NotFoundError) Retryable() bool { return false }
|
||||
|
|
@ -1,129 +1,34 @@
|
|||
package registry
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
retryablehttp "github.com/hashicorp/go-retryablehttp"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
)
|
||||
|
||||
type httpGetter interface {
|
||||
Get(url string) (resp *http.Response, err error)
|
||||
}
|
||||
|
||||
// SchemaRegistry is a file repository (local or remote) that contains JSON schemas for Kubernetes resources
|
||||
type SchemaRegistry struct {
|
||||
c httpGetter
|
||||
schemaPathTemplate string
|
||||
cache cache.Cache
|
||||
strict bool
|
||||
debug bool
|
||||
loader jsonschema.URLLoader
|
||||
}
|
||||
|
||||
func newHTTPRegistry(schemaPathTemplate string, cacheFolder string, strict bool, skipTLS bool, debug bool) (*SchemaRegistry, error) {
|
||||
reghttp := &http.Transport{
|
||||
MaxIdleConns: 100,
|
||||
IdleConnTimeout: 3 * time.Second,
|
||||
DisableCompression: true,
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
}
|
||||
|
||||
if skipTLS {
|
||||
reghttp.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
|
||||
}
|
||||
|
||||
var filecache cache.Cache = nil
|
||||
if cacheFolder != "" {
|
||||
fi, err := os.Stat(cacheFolder)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed opening cache folder %s: %s", cacheFolder, err)
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return nil, fmt.Errorf("cache folder %s is not a directory", err)
|
||||
}
|
||||
|
||||
filecache = cache.NewOnDiskCache(cacheFolder)
|
||||
}
|
||||
|
||||
// retriable http client
|
||||
retryClient := retryablehttp.NewClient()
|
||||
retryClient.RetryMax = 2
|
||||
retryClient.HTTPClient = &http.Client{Transport: reghttp}
|
||||
retryClient.Logger = nil
|
||||
|
||||
func newHTTPRegistry(schemaPathTemplate string, loader jsonschema.URLLoader, strict bool, debug bool) (*SchemaRegistry, error) {
|
||||
return &SchemaRegistry{
|
||||
c: retryClient.StandardClient(),
|
||||
schemaPathTemplate: schemaPathTemplate,
|
||||
cache: filecache,
|
||||
strict: strict,
|
||||
loader: loader,
|
||||
debug: debug,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// DownloadSchema downloads the schema for a particular resource from an HTTP server
|
||||
func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
|
||||
func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, any, error) {
|
||||
url, err := schemaPath(r.schemaPathTemplate, resourceKind, resourceAPIVersion, k8sVersion, r.strict)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
if r.cache != nil {
|
||||
if b, err := r.cache.Get(resourceKind, resourceAPIVersion, k8sVersion); err == nil {
|
||||
return url, b.([]byte), nil
|
||||
}
|
||||
}
|
||||
resp, err := r.loader.Load(url)
|
||||
|
||||
resp, err := r.c.Get(url)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed downloading schema at %s: %s", url, err)
|
||||
if r.debug {
|
||||
log.Println(msg)
|
||||
}
|
||||
return url, nil, errors.New(msg)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNotFound {
|
||||
msg := fmt.Sprintf("could not find schema at %s", url)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return url, nil, newNotFoundError(errors.New(msg))
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
msg := fmt.Sprintf("error while downloading schema at %s - received HTTP status %d", url, resp.StatusCode)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return url, nil, fmt.Errorf("%s", msg)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed parsing schema from %s: %s", url, err)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return url, nil, errors.New(msg)
|
||||
}
|
||||
|
||||
if r.debug {
|
||||
log.Printf("using schema found at %s", url)
|
||||
}
|
||||
|
||||
if r.cache != nil {
|
||||
if err := r.cache.Set(resourceKind, resourceAPIVersion, k8sVersion, body); err != nil {
|
||||
return url, nil, fmt.Errorf("failed writing schema to cache: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
return url, body, nil
|
||||
return url, resp, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,12 +1,7 @@
|
|||
package registry
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type mockHTTPGetter struct {
|
||||
|
|
@ -25,156 +20,156 @@ func (m *mockHTTPGetter) Get(url string) (resp *http.Response, err error) {
|
|||
return m.httpGet(*m, url)
|
||||
}
|
||||
|
||||
func TestDownloadSchema(t *testing.T) {
|
||||
callCounts := map[string]int{}
|
||||
|
||||
// http server to simulate different responses
|
||||
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
var s int
|
||||
callCounts[r.URL.Path]++
|
||||
callCount := callCounts[r.URL.Path]
|
||||
|
||||
switch r.URL.Path {
|
||||
case "/404":
|
||||
s = http.StatusNotFound
|
||||
case "/500":
|
||||
s = http.StatusInternalServerError
|
||||
case "/503":
|
||||
if callCount < 2 {
|
||||
s = http.StatusServiceUnavailable
|
||||
} else {
|
||||
s = http.StatusOK // Should succeed on 3rd try
|
||||
}
|
||||
|
||||
case "/simulate-reset":
|
||||
if callCount < 2 {
|
||||
if hj, ok := w.(http.Hijacker); ok {
|
||||
conn, _, err := hj.Hijack()
|
||||
if err != nil {
|
||||
fmt.Printf("Hijacking failed: %v\n", err)
|
||||
return
|
||||
}
|
||||
conn.Close() // Close the connection to simulate a reset
|
||||
}
|
||||
return
|
||||
}
|
||||
s = http.StatusOK // Should succeed on third try
|
||||
|
||||
default:
|
||||
s = http.StatusOK
|
||||
}
|
||||
|
||||
w.WriteHeader(s)
|
||||
w.Write([]byte(http.StatusText(s)))
|
||||
})
|
||||
|
||||
port := fmt.Sprint(rand.Intn(1000) + 9000) // random port
|
||||
server := &http.Server{Addr: "127.0.0.1:" + port}
|
||||
url := fmt.Sprintf("http://localhost:%s", port)
|
||||
|
||||
go func() {
|
||||
if err := server.ListenAndServe(); err != nil {
|
||||
fmt.Printf("Failed to start server: %v\n", err)
|
||||
}
|
||||
}()
|
||||
defer server.Shutdown(nil)
|
||||
|
||||
// Wait for the server to start
|
||||
for i := 0; i < 20; i++ {
|
||||
if _, err := http.Get(url); err == nil {
|
||||
break
|
||||
}
|
||||
|
||||
if i == 19 {
|
||||
t.Error("http server did not start")
|
||||
return
|
||||
}
|
||||
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
}
|
||||
|
||||
for _, testCase := range []struct {
|
||||
name string
|
||||
schemaPathTemplate string
|
||||
strict bool
|
||||
resourceKind, resourceAPIVersion, k8sversion string
|
||||
expect []byte
|
||||
expectErr error
|
||||
}{
|
||||
{
|
||||
"retry connection reset by peer",
|
||||
fmt.Sprintf("%s/simulate-reset", url),
|
||||
true,
|
||||
"Deployment",
|
||||
"v1",
|
||||
"1.18.0",
|
||||
[]byte(http.StatusText(http.StatusOK)),
|
||||
nil,
|
||||
},
|
||||
{
|
||||
"getting 404",
|
||||
fmt.Sprintf("%s/404", url),
|
||||
true,
|
||||
"Deployment",
|
||||
"v1",
|
||||
"1.18.0",
|
||||
nil,
|
||||
fmt.Errorf("could not find schema at %s/404", url),
|
||||
},
|
||||
{
|
||||
"getting 500",
|
||||
fmt.Sprintf("%s/500", url),
|
||||
true,
|
||||
"Deployment",
|
||||
"v1",
|
||||
"1.18.0",
|
||||
nil,
|
||||
fmt.Errorf("failed downloading schema at %s/500: Get \"%s/500\": GET %s/500 giving up after 3 attempt(s)", url, url, url),
|
||||
},
|
||||
{
|
||||
"retry 503",
|
||||
fmt.Sprintf("%s/503", url),
|
||||
true,
|
||||
"Deployment",
|
||||
"v1",
|
||||
"1.18.0",
|
||||
[]byte(http.StatusText(http.StatusOK)),
|
||||
nil,
|
||||
},
|
||||
{
|
||||
"200",
|
||||
url,
|
||||
true,
|
||||
"Deployment",
|
||||
"v1",
|
||||
"1.18.0",
|
||||
[]byte(http.StatusText(http.StatusOK)),
|
||||
nil,
|
||||
},
|
||||
} {
|
||||
callCounts = map[string]int{} // Reinitialise counters
|
||||
|
||||
reg, err := newHTTPRegistry(testCase.schemaPathTemplate, "", testCase.strict, true, true)
|
||||
if err != nil {
|
||||
t.Errorf("during test '%s': failed to create registry: %s", testCase.name, err)
|
||||
continue
|
||||
}
|
||||
|
||||
_, res, err := reg.DownloadSchema(testCase.resourceKind, testCase.resourceAPIVersion, testCase.k8sversion)
|
||||
if err == nil || testCase.expectErr == nil {
|
||||
if err == nil && testCase.expectErr != nil {
|
||||
t.Errorf("during test '%s': expected error\n%s, got nil", testCase.name, testCase.expectErr)
|
||||
}
|
||||
if err != nil && testCase.expectErr == nil {
|
||||
t.Errorf("during test '%s': expected no error, got\n%s\n", testCase.name, err)
|
||||
}
|
||||
} else if err.Error() != testCase.expectErr.Error() {
|
||||
t.Errorf("during test '%s': expected error\n%s, got:\n%s\n", testCase.name, testCase.expectErr, err)
|
||||
}
|
||||
|
||||
if !bytes.Equal(res, testCase.expect) {
|
||||
t.Errorf("during test '%s': expected '%s', got '%s'", testCase.name, testCase.expect, res)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
//func TestDownloadSchema(t *testing.T) {
|
||||
// callCounts := map[string]int{}
|
||||
//
|
||||
// // http server to simulate different responses
|
||||
// http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
// var s int
|
||||
// callCounts[r.URL.Path]++
|
||||
// callCount := callCounts[r.URL.Path]
|
||||
//
|
||||
// switch r.URL.Path {
|
||||
// case "/404":
|
||||
// s = http.StatusNotFound
|
||||
// case "/500":
|
||||
// s = http.StatusInternalServerError
|
||||
// case "/503":
|
||||
// if callCount < 2 {
|
||||
// s = http.StatusServiceUnavailable
|
||||
// } else {
|
||||
// s = http.StatusOK // Should succeed on 3rd try
|
||||
// }
|
||||
//
|
||||
// case "/simulate-reset":
|
||||
// if callCount < 2 {
|
||||
// if hj, ok := w.(http.Hijacker); ok {
|
||||
// conn, _, err := hj.Hijack()
|
||||
// if err != nil {
|
||||
// fmt.Printf("Hijacking failed: %v\n", err)
|
||||
// return
|
||||
// }
|
||||
// conn.Close() // Close the connection to simulate a reset
|
||||
// }
|
||||
// return
|
||||
// }
|
||||
// s = http.StatusOK // Should succeed on third try
|
||||
//
|
||||
// default:
|
||||
// s = http.StatusOK
|
||||
// }
|
||||
//
|
||||
// w.WriteHeader(s)
|
||||
// w.Write([]byte(http.StatusText(s)))
|
||||
// })
|
||||
//
|
||||
// port := fmt.Sprint(rand.Intn(1000) + 9000) // random port
|
||||
// server := &http.Server{Addr: "127.0.0.1:" + port}
|
||||
// url := fmt.Sprintf("http://localhost:%s", port)
|
||||
//
|
||||
// go func() {
|
||||
// if err := server.ListenAndServe(); err != nil {
|
||||
// fmt.Printf("Failed to start server: %v\n", err)
|
||||
// }
|
||||
// }()
|
||||
// defer server.Shutdown(nil)
|
||||
//
|
||||
// // Wait for the server to start
|
||||
// for i := 0; i < 20; i++ {
|
||||
// if _, err := http.Get(url); err == nil {
|
||||
// break
|
||||
// }
|
||||
//
|
||||
// if i == 19 {
|
||||
// t.Error("http server did not start")
|
||||
// return
|
||||
// }
|
||||
//
|
||||
// time.Sleep(50 * time.Millisecond)
|
||||
// }
|
||||
//
|
||||
// for _, testCase := range []struct {
|
||||
// name string
|
||||
// schemaPathTemplate string
|
||||
// strict bool
|
||||
// resourceKind, resourceAPIVersion, k8sversion string
|
||||
// expect []byte
|
||||
// expectErr error
|
||||
// }{
|
||||
// {
|
||||
// "retry connection reset by peer",
|
||||
// fmt.Sprintf("%s/simulate-reset", url),
|
||||
// true,
|
||||
// "Deployment",
|
||||
// "v1",
|
||||
// "1.18.0",
|
||||
// []byte(http.StatusText(http.StatusOK)),
|
||||
// nil,
|
||||
// },
|
||||
// {
|
||||
// "getting 404",
|
||||
// fmt.Sprintf("%s/404", url),
|
||||
// true,
|
||||
// "Deployment",
|
||||
// "v1",
|
||||
// "1.18.0",
|
||||
// nil,
|
||||
// fmt.Errorf("could not find schema at %s/404", url),
|
||||
// },
|
||||
// {
|
||||
// "getting 500",
|
||||
// fmt.Sprintf("%s/500", url),
|
||||
// true,
|
||||
// "Deployment",
|
||||
// "v1",
|
||||
// "1.18.0",
|
||||
// nil,
|
||||
// fmt.Errorf("failed downloading schema at %s/500: Get \"%s/500\": GET %s/500 giving up after 3 attempt(s)", url, url, url),
|
||||
// },
|
||||
// {
|
||||
// "retry 503",
|
||||
// fmt.Sprintf("%s/503", url),
|
||||
// true,
|
||||
// "Deployment",
|
||||
// "v1",
|
||||
// "1.18.0",
|
||||
// []byte(http.StatusText(http.StatusOK)),
|
||||
// nil,
|
||||
// },
|
||||
// {
|
||||
// "200",
|
||||
// url,
|
||||
// true,
|
||||
// "Deployment",
|
||||
// "v1",
|
||||
// "1.18.0",
|
||||
// []byte(http.StatusText(http.StatusOK)),
|
||||
// nil,
|
||||
// },
|
||||
// } {
|
||||
// callCounts = map[string]int{} // Reinitialise counters
|
||||
//
|
||||
// reg, err := newHTTPRegistry(testCase.schemaPathTemplate, "", testCase.strict, true, true)
|
||||
// if err != nil {
|
||||
// t.Errorf("during test '%s': failed to create registry: %s", testCase.name, err)
|
||||
// continue
|
||||
// }
|
||||
//
|
||||
// _, res, err := reg.DownloadSchema(testCase.resourceKind, testCase.resourceAPIVersion, testCase.k8sversion)
|
||||
// if err == nil || testCase.expectErr == nil {
|
||||
// if err == nil && testCase.expectErr != nil {
|
||||
// t.Errorf("during test '%s': expected error\n%s, got nil", testCase.name, testCase.expectErr)
|
||||
// }
|
||||
// if err != nil && testCase.expectErr == nil {
|
||||
// t.Errorf("during test '%s': expected no error, got\n%s\n", testCase.name, err)
|
||||
// }
|
||||
// } else if err.Error() != testCase.expectErr.Error() {
|
||||
// t.Errorf("during test '%s': expected error\n%s, got:\n%s\n", testCase.name, testCase.expectErr, err)
|
||||
// }
|
||||
//
|
||||
// if !bytes.Equal(res, testCase.expect) {
|
||||
// t.Errorf("during test '%s': expected '%s', got '%s'", testCase.name, testCase.expect, res)
|
||||
// }
|
||||
// }
|
||||
//
|
||||
//}
|
||||
|
|
|
|||
|
|
@ -1,63 +1,33 @@
|
|||
package registry
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
)
|
||||
|
||||
type LocalRegistry struct {
|
||||
pathTemplate string
|
||||
strict bool
|
||||
debug bool
|
||||
loader jsonschema.URLLoader
|
||||
}
|
||||
|
||||
// NewLocalSchemas creates a new "registry", that will serve schemas from files, given a list of schema filenames
|
||||
func newLocalRegistry(pathTemplate string, strict bool, debug bool) (*LocalRegistry, error) {
|
||||
func newLocalRegistry(pathTemplate string, loader jsonschema.URLLoader, strict bool, debug bool) (*LocalRegistry, error) {
|
||||
return &LocalRegistry{
|
||||
pathTemplate,
|
||||
strict,
|
||||
debug,
|
||||
loader,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// DownloadSchema retrieves the schema from a file for the resource
|
||||
func (r LocalRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
|
||||
func (r LocalRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, any, error) {
|
||||
schemaFile, err := schemaPath(r.pathTemplate, resourceKind, resourceAPIVersion, k8sVersion, r.strict)
|
||||
if err != nil {
|
||||
return schemaFile, []byte{}, nil
|
||||
}
|
||||
f, err := os.Open(schemaFile)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
msg := fmt.Sprintf("could not open file %s", schemaFile)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return schemaFile, nil, newNotFoundError(errors.New(msg))
|
||||
}
|
||||
|
||||
msg := fmt.Sprintf("failed to open schema at %s: %s", schemaFile, err)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return schemaFile, nil, errors.New(msg)
|
||||
}
|
||||
|
||||
defer f.Close()
|
||||
content, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed to read schema at %s: %s", schemaFile, err)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return schemaFile, nil, err
|
||||
}
|
||||
|
||||
if r.debug {
|
||||
log.Printf("using schema found at %s", schemaFile)
|
||||
}
|
||||
return schemaFile, content, nil
|
||||
s, err := r.loader.Load(schemaFile)
|
||||
return schemaFile, s, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,6 +3,9 @@ package registry
|
|||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"github.com/yannh/kubeconform/pkg/loader"
|
||||
"os"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
|
@ -13,25 +16,9 @@ type Manifest struct {
|
|||
|
||||
// Registry is an interface that should be implemented by any source of Kubernetes schemas
|
||||
type Registry interface {
|
||||
DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error)
|
||||
DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, any, error)
|
||||
}
|
||||
|
||||
// Retryable indicates whether an error is a temporary or a permanent failure
|
||||
type Retryable interface {
|
||||
IsNotFound() bool
|
||||
}
|
||||
|
||||
// NotFoundError is returned when the registry does not contain a schema for the resource
|
||||
type NotFoundError struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func newNotFoundError(err error) *NotFoundError {
|
||||
return &NotFoundError{err}
|
||||
}
|
||||
func (e *NotFoundError) Error() string { return e.err.Error() }
|
||||
func (e *NotFoundError) Retryable() bool { return false }
|
||||
|
||||
func schemaPath(tpl, resourceKind, resourceAPIVersion, k8sVersion string, strict bool) (string, error) {
|
||||
normalisedVersion := k8sVersion
|
||||
if normalisedVersion != "master" {
|
||||
|
|
@ -81,7 +68,7 @@ func schemaPath(tpl, resourceKind, resourceAPIVersion, k8sVersion string, strict
|
|||
return buf.String(), nil
|
||||
}
|
||||
|
||||
func New(schemaLocation string, cache string, strict bool, skipTLS bool, debug bool) (Registry, error) {
|
||||
func New(schemaLocation string, cacheFolder string, strict bool, skipTLS bool, debug bool) (Registry, error) {
|
||||
if schemaLocation == "default" {
|
||||
schemaLocation = "https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/{{ .NormalizedKubernetesVersion }}-standalone{{ .StrictSuffix }}/{{ .ResourceKind }}{{ .KindSuffix }}.json"
|
||||
} else if !strings.HasSuffix(schemaLocation, "json") { // If we dont specify a full templated path, we assume the paths of our fork of kubernetes-json-schema
|
||||
|
|
@ -93,9 +80,29 @@ func New(schemaLocation string, cache string, strict bool, skipTLS bool, debug b
|
|||
return nil, fmt.Errorf("failed initialising schema location registry: %s", err)
|
||||
}
|
||||
|
||||
if strings.HasPrefix(schemaLocation, "http") {
|
||||
return newHTTPRegistry(schemaLocation, cache, strict, skipTLS, debug)
|
||||
var c cache.Cache = nil
|
||||
if cacheFolder != "" {
|
||||
fi, err := os.Stat(cacheFolder)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed opening cache folder %s: %s", cacheFolder, err)
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return nil, fmt.Errorf("cache folder %s is not a directory", err)
|
||||
}
|
||||
|
||||
c = cache.NewOnDiskCache(cacheFolder)
|
||||
} else {
|
||||
c = cache.NewInMemoryCache()
|
||||
}
|
||||
|
||||
return newLocalRegistry(schemaLocation, strict, debug)
|
||||
if strings.HasPrefix(schemaLocation, "http") {
|
||||
httpLoader, err := loader.NewHTTPURLLoader(skipTLS, c)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed creating HTTP loader: %s", err)
|
||||
}
|
||||
return newHTTPRegistry(schemaLocation, httpLoader, strict, debug)
|
||||
}
|
||||
|
||||
fileLoader := loader.NewFileLoader(c)
|
||||
return newLocalRegistry(schemaLocation, fileLoader, strict, debug)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,18 +2,20 @@
|
|||
package validator
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
jsonschema "github.com/santhosh-tekuri/jsonschema/v5"
|
||||
_ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
jsonschema "github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"github.com/yannh/kubeconform/pkg/loader"
|
||||
"github.com/yannh/kubeconform/pkg/registry"
|
||||
"github.com/yannh/kubeconform/pkg/resource"
|
||||
"golang.org/x/text/language"
|
||||
"golang.org/x/text/message"
|
||||
"io"
|
||||
"os"
|
||||
"sigs.k8s.io/yaml"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Different types of validation results
|
||||
|
|
@ -92,19 +94,48 @@ func New(schemaLocations []string, opts Opts) (Validator, error) {
|
|||
opts.RejectKinds = map[string]struct{}{}
|
||||
}
|
||||
|
||||
var filecache cache.Cache = nil
|
||||
if opts.Cache != "" {
|
||||
fi, err := os.Stat(opts.Cache)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed opening cache folder %s: %s", opts.Cache, err)
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return nil, fmt.Errorf("cache folder %s is not a directory", err)
|
||||
}
|
||||
|
||||
filecache = cache.NewOnDiskCache(opts.Cache)
|
||||
}
|
||||
|
||||
httpLoader, err := loader.NewHTTPURLLoader(false, filecache)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed creating HTTP loader: %s", err)
|
||||
}
|
||||
|
||||
return &v{
|
||||
opts: opts,
|
||||
schemaDownload: downloadSchema,
|
||||
schemaCache: cache.NewInMemoryCache(),
|
||||
regs: registries,
|
||||
opts: opts,
|
||||
schemaDownload: downloadSchema,
|
||||
schemaMemoryCache: cache.NewInMemoryCache(),
|
||||
regs: registries,
|
||||
loader: jsonschema.SchemeURLLoader{
|
||||
"file": jsonschema.FileLoader{},
|
||||
"http": httpLoader,
|
||||
"https": httpLoader,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
type v struct {
|
||||
opts Opts
|
||||
schemaCache cache.Cache
|
||||
schemaDownload func(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error)
|
||||
regs []registry.Registry
|
||||
opts Opts
|
||||
schemaDiskCache cache.Cache
|
||||
schemaMemoryCache cache.Cache
|
||||
schemaDownload func(registries []registry.Registry, loader jsonschema.SchemeURLLoader, kind, version, k8sVersion string) (*jsonschema.Schema, error)
|
||||
regs []registry.Registry
|
||||
loader jsonschema.SchemeURLLoader
|
||||
}
|
||||
|
||||
func key(resourceKind, resourceAPIVersion, k8sVersion string) string {
|
||||
return fmt.Sprintf("%s-%s-%s", resourceKind, resourceAPIVersion, k8sVersion)
|
||||
}
|
||||
|
||||
// ValidateResource validates a single resource. This allows to validate
|
||||
|
|
@ -165,8 +196,8 @@ func (val *v) ValidateResource(res resource.Resource) Result {
|
|||
cached := false
|
||||
var schema *jsonschema.Schema
|
||||
|
||||
if val.schemaCache != nil {
|
||||
s, err := val.schemaCache.Get(sig.Kind, sig.Version, val.opts.KubernetesVersion)
|
||||
if val.schemaMemoryCache != nil {
|
||||
s, err := val.schemaMemoryCache.Get(key(sig.Kind, sig.Version, val.opts.KubernetesVersion))
|
||||
if err == nil {
|
||||
cached = true
|
||||
schema = s.(*jsonschema.Schema)
|
||||
|
|
@ -174,12 +205,12 @@ func (val *v) ValidateResource(res resource.Resource) Result {
|
|||
}
|
||||
|
||||
if !cached {
|
||||
if schema, err = val.schemaDownload(val.regs, sig.Kind, sig.Version, val.opts.KubernetesVersion); err != nil {
|
||||
if schema, err = val.schemaDownload(val.regs, val.loader, sig.Kind, sig.Version, val.opts.KubernetesVersion); err != nil {
|
||||
return Result{Resource: res, Err: err, Status: Error}
|
||||
}
|
||||
|
||||
if val.schemaCache != nil {
|
||||
val.schemaCache.Set(sig.Kind, sig.Version, val.opts.KubernetesVersion, schema)
|
||||
if val.schemaMemoryCache != nil {
|
||||
val.schemaMemoryCache.Set(key(sig.Kind, sig.Version, val.opts.KubernetesVersion), schema)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -197,17 +228,22 @@ func (val *v) ValidateResource(res resource.Resource) Result {
|
|||
var e *jsonschema.ValidationError
|
||||
if errors.As(err, &e) {
|
||||
for _, ve := range e.Causes {
|
||||
path := ""
|
||||
for _, f := range ve.InstanceLocation {
|
||||
path = path + "/" + f
|
||||
}
|
||||
validationErrors = append(validationErrors, ValidationError{
|
||||
Path: ve.InstanceLocation,
|
||||
Msg: ve.Message,
|
||||
Path: path,
|
||||
Msg: ve.ErrorKind.LocalizedString(message.NewPrinter(language.English)),
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return Result{
|
||||
Resource: res,
|
||||
Status: Invalid,
|
||||
Err: fmt.Errorf("problem validating schema. Check JSON formatting: %s", err),
|
||||
Err: fmt.Errorf("problem validating schema. Check JSON formatting: %s", strings.ReplaceAll(err.Error(), "\n", " ")),
|
||||
ValidationErrors: validationErrors,
|
||||
}
|
||||
}
|
||||
|
|
@ -248,17 +284,18 @@ func (val *v) Validate(filename string, r io.ReadCloser) []Result {
|
|||
return val.ValidateWithContext(context.Background(), filename, r)
|
||||
}
|
||||
|
||||
func downloadSchema(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error) {
|
||||
func downloadSchema(registries []registry.Registry, l jsonschema.SchemeURLLoader, kind, version, k8sVersion string) (*jsonschema.Schema, error) {
|
||||
var err error
|
||||
var schemaBytes []byte
|
||||
var path string
|
||||
var s any
|
||||
|
||||
for _, reg := range registries {
|
||||
path, schemaBytes, err = reg.DownloadSchema(kind, version, k8sVersion)
|
||||
path, s, err = reg.DownloadSchema(kind, version, k8sVersion)
|
||||
if err == nil {
|
||||
c := jsonschema.NewCompiler()
|
||||
c.Draft = jsonschema.Draft4
|
||||
if err := c.AddResource(path, bytes.NewReader(schemaBytes)); err != nil {
|
||||
c.UseLoader(l)
|
||||
c.DefaultDraft(jsonschema.Draft4)
|
||||
if err := c.AddResource(path, s); err != nil {
|
||||
continue
|
||||
}
|
||||
schema, err := c.Compile(path)
|
||||
|
|
@ -266,14 +303,12 @@ func downloadSchema(registries []registry.Registry, kind, version, k8sVersion st
|
|||
if err != nil {
|
||||
continue
|
||||
}
|
||||
return schema, err
|
||||
return schema, nil
|
||||
}
|
||||
|
||||
// If we get a 404, we try the next registry, but we exit if we get a real failure
|
||||
if _, notfound := err.(*registry.NotFoundError); notfound {
|
||||
if _, notfound := err.(*loader.NotFoundError); notfound {
|
||||
continue
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -2,6 +2,8 @@ package validator
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"github.com/yannh/kubeconform/pkg/loader"
|
||||
"io"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
|
@ -12,16 +14,16 @@ import (
|
|||
)
|
||||
|
||||
type mockRegistry struct {
|
||||
SchemaDownloader func() (string, []byte, error)
|
||||
SchemaDownloader func() (string, any, error)
|
||||
}
|
||||
|
||||
func newMockRegistry(f func() (string, []byte, error)) *mockRegistry {
|
||||
func newMockRegistry(f func() (string, any, error)) *mockRegistry {
|
||||
return &mockRegistry{
|
||||
SchemaDownloader: f,
|
||||
}
|
||||
}
|
||||
|
||||
func (m mockRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
|
||||
func (m mockRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, any, error) {
|
||||
return m.SchemaDownloader()
|
||||
}
|
||||
|
||||
|
|
@ -106,7 +108,7 @@ lastName: bar
|
|||
[]ValidationError{
|
||||
{
|
||||
Path: "/firstName",
|
||||
Msg: "expected number, but got string",
|
||||
Msg: "got string, want number",
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -145,7 +147,7 @@ firstName: foo
|
|||
[]ValidationError{
|
||||
{
|
||||
Path: "",
|
||||
Msg: "missing properties: 'lastName'",
|
||||
Msg: "missing property 'lastName'",
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -314,7 +316,7 @@ lastName: bar
|
|||
}`),
|
||||
false,
|
||||
false,
|
||||
Valid,
|
||||
Error,
|
||||
[]ValidationError{},
|
||||
},
|
||||
{
|
||||
|
|
@ -359,7 +361,7 @@ lastName: bar
|
|||
[]byte(`<html>error page</html>`),
|
||||
true,
|
||||
false,
|
||||
Skipped,
|
||||
Error,
|
||||
[]ValidationError{},
|
||||
},
|
||||
{
|
||||
|
|
@ -385,14 +387,21 @@ lastName: bar
|
|||
IgnoreMissingSchemas: testCase.ignoreMissingSchema,
|
||||
Strict: testCase.strict,
|
||||
},
|
||||
schemaCache: nil,
|
||||
schemaDownload: downloadSchema,
|
||||
regs: []registry.Registry{
|
||||
newMockRegistry(func() (string, []byte, error) {
|
||||
return "", testCase.schemaRegistry1, nil
|
||||
newMockRegistry(func() (string, any, error) {
|
||||
if testCase.schemaRegistry1 == nil {
|
||||
return "", nil, loader.NewNotFoundError(nil)
|
||||
}
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(testCase.schemaRegistry1))
|
||||
return "", s, err
|
||||
}),
|
||||
newMockRegistry(func() (string, []byte, error) {
|
||||
return "", testCase.schemaRegistry2, nil
|
||||
newMockRegistry(func() (string, any, error) {
|
||||
if testCase.schemaRegistry2 == nil {
|
||||
return "", nil, loader.NewNotFoundError(nil)
|
||||
}
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(testCase.schemaRegistry2))
|
||||
return "", s, err
|
||||
}),
|
||||
},
|
||||
}
|
||||
|
|
@ -447,8 +456,8 @@ age: not a number
|
|||
}`)
|
||||
|
||||
expectedErrors := []ValidationError{
|
||||
{Path: "", Msg: "missing properties: 'lastName'"},
|
||||
{Path: "/age", Msg: "expected integer, but got string"},
|
||||
{Path: "", Msg: "missing property 'lastName'"},
|
||||
{Path: "/age", Msg: "got string, want integer"},
|
||||
}
|
||||
|
||||
val := v{
|
||||
|
|
@ -456,11 +465,11 @@ age: not a number
|
|||
SkipKinds: map[string]struct{}{},
|
||||
RejectKinds: map[string]struct{}{},
|
||||
},
|
||||
schemaCache: nil,
|
||||
schemaDownload: downloadSchema,
|
||||
regs: []registry.Registry{
|
||||
newMockRegistry(func() (string, []byte, error) {
|
||||
return "", schema, nil
|
||||
newMockRegistry(func() (string, any, error) {
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(schema))
|
||||
return "", s, err
|
||||
}),
|
||||
},
|
||||
}
|
||||
|
|
@ -505,11 +514,11 @@ firstName: foo
|
|||
SkipKinds: map[string]struct{}{},
|
||||
RejectKinds: map[string]struct{}{},
|
||||
},
|
||||
schemaCache: nil,
|
||||
schemaDownload: downloadSchema,
|
||||
regs: []registry.Registry{
|
||||
newMockRegistry(func() (string, []byte, error) {
|
||||
return "", schema, nil
|
||||
newMockRegistry(func() (string, any, error) {
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(schema))
|
||||
return "", s, err
|
||||
}),
|
||||
},
|
||||
}
|
||||
|
|
@ -523,7 +532,7 @@ firstName: foo
|
|||
|
||||
expectedStatuses := []Status{Valid, Invalid}
|
||||
expectedValidationErrors := []ValidationError{
|
||||
{Path: "", Msg: "missing properties: 'lastName'"},
|
||||
{Path: "", Msg: "missing property 'lastName'"},
|
||||
}
|
||||
if !reflect.DeepEqual(expectedStatuses, gotStatuses) {
|
||||
t.Errorf("Expected %+v, got %+v", expectedStatuses, gotStatuses)
|
||||
|
|
|
|||
4
vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
generated
vendored
4
vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
generated
vendored
|
|
@ -1,4 +0,0 @@
|
|||
.vscode
|
||||
.idea
|
||||
*.swp
|
||||
cmd/jv/jv
|
||||
220
vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
generated
vendored
220
vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
generated
vendored
|
|
@ -1,220 +0,0 @@
|
|||
# jsonschema v5.3.1
|
||||
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
[](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
[](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
|
||||
[](https://codecov.io/gh/santhosh-tekuri/jsonschema)
|
||||
|
||||
Package jsonschema provides json-schema compilation and validation.
|
||||
|
||||
[Benchmarks](https://dev.to/vearutop/benchmarking-correctness-and-performance-of-go-json-schema-validators-3247)
|
||||
|
||||
### Features:
|
||||
- implements
|
||||
[draft 2020-12](https://json-schema.org/specification-links.html#2020-12),
|
||||
[draft 2019-09](https://json-schema.org/specification-links.html#draft-2019-09-formerly-known-as-draft-8),
|
||||
[draft-7](https://json-schema.org/specification-links.html#draft-7),
|
||||
[draft-6](https://json-schema.org/specification-links.html#draft-6),
|
||||
[draft-4](https://json-schema.org/specification-links.html#draft-4)
|
||||
- fully compliant with [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite), (excluding some optional)
|
||||
- list of optional tests that are excluded can be found in schema_test.go(variable [skipTests](https://github.com/santhosh-tekuri/jsonschema/blob/master/schema_test.go#L24))
|
||||
- validates schemas against meta-schema
|
||||
- full support of remote references
|
||||
- support of recursive references between schemas
|
||||
- detects infinite loop in schemas
|
||||
- thread safe validation
|
||||
- rich, intuitive hierarchial error messages with json-pointers to exact location
|
||||
- supports output formats flag, basic and detailed
|
||||
- supports enabling format and content Assertions in draft2019-09 or above
|
||||
- change `Compiler.AssertFormat`, `Compiler.AssertContent` to `true`
|
||||
- compiled schema can be introspected. easier to develop tools like generating go structs given schema
|
||||
- supports user-defined keywords via [extensions](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-Extension)
|
||||
- implements following formats (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedFormat))
|
||||
- date-time, date, time, duration, period (supports leap-second)
|
||||
- uuid, hostname, email
|
||||
- ip-address, ipv4, ipv6
|
||||
- uri, uriref, uri-template(limited validation)
|
||||
- json-pointer, relative-json-pointer
|
||||
- regex, format
|
||||
- implements following contentEncoding (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
|
||||
- base64
|
||||
- implements following contentMediaType (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
|
||||
- application/json
|
||||
- can load from files/http/https/[string](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-FromString)/[]byte/io.Reader (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedLoader))
|
||||
|
||||
|
||||
see examples in [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
|
||||
The schema is compiled against the version specified in `$schema` property.
|
||||
If "$schema" property is missing, it uses latest draft which currently implemented
|
||||
by this library.
|
||||
|
||||
You can force to use specific version, when `$schema` is missing, as follows:
|
||||
|
||||
```go
|
||||
compiler := jsonschema.NewCompiler()
|
||||
compiler.Draft = jsonschema.Draft4
|
||||
```
|
||||
|
||||
This package supports loading json-schema from filePath and fileURL.
|
||||
|
||||
To load json-schema from HTTPURL, add following import:
|
||||
|
||||
```go
|
||||
import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
```
|
||||
|
||||
## Rich Errors
|
||||
|
||||
The ValidationError returned by Validate method contains detailed context to understand why and where the error is.
|
||||
|
||||
schema.json:
|
||||
```json
|
||||
{
|
||||
"$ref": "t.json#/definitions/employee"
|
||||
}
|
||||
```
|
||||
|
||||
t.json:
|
||||
```json
|
||||
{
|
||||
"definitions": {
|
||||
"employee": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
doc.json:
|
||||
```json
|
||||
1
|
||||
```
|
||||
|
||||
assuming `err` is the ValidationError returned when `doc.json` validated with `schema.json`,
|
||||
```go
|
||||
fmt.Printf("%#v\n", err) // using %#v prints errors hierarchy
|
||||
```
|
||||
Prints:
|
||||
```
|
||||
[I#] [S#] doesn't validate with file:///Users/santhosh/jsonschema/schema.json#
|
||||
[I#] [S#/$ref] doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'
|
||||
[I#] [S#/definitions/employee/type] expected string, but got number
|
||||
```
|
||||
|
||||
Here `I` stands for instance document and `S` stands for schema document.
|
||||
The json-fragments that caused error in instance and schema documents are represented using json-pointer notation.
|
||||
Nested causes are printed with indent.
|
||||
|
||||
To output `err` in `flag` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.FlagOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false
|
||||
}
|
||||
```
|
||||
To output `err` in `basic` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.BasicOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false,
|
||||
"errors": [
|
||||
{
|
||||
"keywordLocation": "",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
|
||||
"instanceLocation": "",
|
||||
"error": "doesn't validate with file:///Users/santhosh/jsonschema/schema.json#"
|
||||
},
|
||||
{
|
||||
"keywordLocation": "/$ref",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
|
||||
"instanceLocation": "",
|
||||
"error": "doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'"
|
||||
},
|
||||
{
|
||||
"keywordLocation": "/$ref/type",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
|
||||
"instanceLocation": "",
|
||||
"error": "expected string, but got number"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
To output `err` in `detailed` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.DetailedOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
|
||||
"instanceLocation": "",
|
||||
"errors": [
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "/$ref",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
|
||||
"instanceLocation": "",
|
||||
"errors": [
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "/$ref/type",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
|
||||
"instanceLocation": "",
|
||||
"error": "expected string, but got number"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## CLI
|
||||
|
||||
to install `go install github.com/santhosh-tekuri/jsonschema/cmd/jv@latest`
|
||||
|
||||
```bash
|
||||
jv [-draft INT] [-output FORMAT] [-assertformat] [-assertcontent] <json-schema> [<json-or-yaml-doc>]...
|
||||
-assertcontent
|
||||
enable content assertions with draft >= 2019
|
||||
-assertformat
|
||||
enable format assertions with draft >= 2019
|
||||
-draft int
|
||||
draft used when '$schema' attribute is missing. valid values 4, 5, 7, 2019, 2020 (default 2020)
|
||||
-output string
|
||||
output format. valid values flag, basic, detailed
|
||||
```
|
||||
|
||||
if no `<json-or-yaml-doc>` arguments are passed, it simply validates the `<json-schema>`.
|
||||
if `$schema` attribute is missing in schema, it uses latest version. this can be overridden by passing `-draft` flag
|
||||
|
||||
exit-code is 1, if there are any validation errors
|
||||
|
||||
`jv` can also validate yaml files. It also accepts schema from yaml files.
|
||||
|
||||
## Validating YAML Documents
|
||||
|
||||
since yaml supports non-string keys, such yaml documents are rendered as invalid json documents.
|
||||
|
||||
most yaml parser use `map[interface{}]interface{}` for object,
|
||||
whereas json parser uses `map[string]interface{}`.
|
||||
|
||||
so we need to manually convert them to `map[string]interface{}`.
|
||||
below code shows such conversion by `toStringKeys` function.
|
||||
|
||||
https://play.golang.org/p/Hhax3MrtD8r
|
||||
|
||||
NOTE: if you are using `gopkg.in/yaml.v3`, then you do not need such conversion. since this library
|
||||
returns `map[string]interface{}` if all keys are strings.
|
||||
812
vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
generated
vendored
812
vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
generated
vendored
|
|
@ -1,812 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/big"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Compiler represents a json-schema compiler.
|
||||
type Compiler struct {
|
||||
// Draft represents the draft used when '$schema' attribute is missing.
|
||||
//
|
||||
// This defaults to latest supported draft (currently 2020-12).
|
||||
Draft *Draft
|
||||
resources map[string]*resource
|
||||
|
||||
// Extensions is used to register extensions.
|
||||
extensions map[string]extension
|
||||
|
||||
// ExtractAnnotations tells whether schema annotations has to be extracted
|
||||
// in compiled Schema or not.
|
||||
ExtractAnnotations bool
|
||||
|
||||
// LoadURL loads the document at given absolute URL.
|
||||
//
|
||||
// If nil, package global LoadURL is used.
|
||||
LoadURL func(s string) (io.ReadCloser, error)
|
||||
|
||||
// Formats can be registered by adding to this map. Key is format name,
|
||||
// value is function that knows how to validate that format.
|
||||
Formats map[string]func(interface{}) bool
|
||||
|
||||
// AssertFormat for specifications >= draft2019-09.
|
||||
AssertFormat bool
|
||||
|
||||
// Decoders can be registered by adding to this map. Key is encoding name,
|
||||
// value is function that knows how to decode string in that format.
|
||||
Decoders map[string]func(string) ([]byte, error)
|
||||
|
||||
// MediaTypes can be registered by adding to this map. Key is mediaType name,
|
||||
// value is function that knows how to validate that mediaType.
|
||||
MediaTypes map[string]func([]byte) error
|
||||
|
||||
// AssertContent for specifications >= draft2019-09.
|
||||
AssertContent bool
|
||||
}
|
||||
|
||||
// Compile parses json-schema at given url returns, if successful,
|
||||
// a Schema object that can be used to match against json.
|
||||
//
|
||||
// Returned error can be *SchemaError
|
||||
func Compile(url string) (*Schema, error) {
|
||||
return NewCompiler().Compile(url)
|
||||
}
|
||||
|
||||
// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
|
||||
// It simplifies safe initialization of global variables holding compiled Schemas.
|
||||
func MustCompile(url string) *Schema {
|
||||
return NewCompiler().MustCompile(url)
|
||||
}
|
||||
|
||||
// CompileString parses and compiles the given schema with given base url.
|
||||
func CompileString(url, schema string) (*Schema, error) {
|
||||
c := NewCompiler()
|
||||
if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.Compile(url)
|
||||
}
|
||||
|
||||
// MustCompileString is like CompileString but panics on error.
|
||||
// It simplified safe initialization of global variables holding compiled Schema.
|
||||
func MustCompileString(url, schema string) *Schema {
|
||||
c := NewCompiler()
|
||||
if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return c.MustCompile(url)
|
||||
}
|
||||
|
||||
// NewCompiler returns a json-schema Compiler object.
|
||||
// if '$schema' attribute is missing, it is treated as draft7. to change this
|
||||
// behavior change Compiler.Draft value
|
||||
func NewCompiler() *Compiler {
|
||||
return &Compiler{
|
||||
Draft: latest,
|
||||
resources: make(map[string]*resource),
|
||||
Formats: make(map[string]func(interface{}) bool),
|
||||
Decoders: make(map[string]func(string) ([]byte, error)),
|
||||
MediaTypes: make(map[string]func([]byte) error),
|
||||
extensions: make(map[string]extension),
|
||||
}
|
||||
}
|
||||
|
||||
// AddResource adds in-memory resource to the compiler.
|
||||
//
|
||||
// Note that url must not have fragment
|
||||
func (c *Compiler) AddResource(url string, r io.Reader) error {
|
||||
res, err := newResource(url, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.resources[res.url] = res
|
||||
return nil
|
||||
}
|
||||
|
||||
// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
|
||||
// It simplifies safe initialization of global variables holding compiled Schemas.
|
||||
func (c *Compiler) MustCompile(url string) *Schema {
|
||||
s, err := c.Compile(url)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("jsonschema: %#v", err))
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// Compile parses json-schema at given url returns, if successful,
|
||||
// a Schema object that can be used to match against json.
|
||||
//
|
||||
// error returned will be of type *SchemaError
|
||||
func (c *Compiler) Compile(url string) (*Schema, error) {
|
||||
// make url absolute
|
||||
u, err := toAbs(url)
|
||||
if err != nil {
|
||||
return nil, &SchemaError{url, err}
|
||||
}
|
||||
url = u
|
||||
|
||||
sch, err := c.compileURL(url, nil, "#")
|
||||
if err != nil {
|
||||
err = &SchemaError{url, err}
|
||||
}
|
||||
return sch, err
|
||||
}
|
||||
|
||||
func (c *Compiler) findResource(url string) (*resource, error) {
|
||||
if _, ok := c.resources[url]; !ok {
|
||||
// load resource
|
||||
var rdr io.Reader
|
||||
if sch, ok := vocabSchemas[url]; ok {
|
||||
rdr = strings.NewReader(sch)
|
||||
} else {
|
||||
loadURL := LoadURL
|
||||
if c.LoadURL != nil {
|
||||
loadURL = c.LoadURL
|
||||
}
|
||||
r, err := loadURL(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer r.Close()
|
||||
rdr = r
|
||||
}
|
||||
if err := c.AddResource(url, rdr); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
r := c.resources[url]
|
||||
if r.draft != nil {
|
||||
return r, nil
|
||||
}
|
||||
|
||||
// set draft
|
||||
r.draft = c.Draft
|
||||
if m, ok := r.doc.(map[string]interface{}); ok {
|
||||
if sch, ok := m["$schema"]; ok {
|
||||
sch, ok := sch.(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("jsonschema: invalid $schema in %s", url)
|
||||
}
|
||||
if !isURI(sch) {
|
||||
return nil, fmt.Errorf("jsonschema: $schema must be uri in %s", url)
|
||||
}
|
||||
r.draft = findDraft(sch)
|
||||
if r.draft == nil {
|
||||
sch, _ := split(sch)
|
||||
if sch == url {
|
||||
return nil, fmt.Errorf("jsonschema: unsupported draft in %s", url)
|
||||
}
|
||||
mr, err := c.findResource(sch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r.draft = mr.draft
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
id, err := r.draft.resolveID(r.url, r.doc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if id != "" {
|
||||
r.url = id
|
||||
}
|
||||
|
||||
if err := r.fillSubschemas(c, r); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func (c *Compiler) compileURL(url string, stack []schemaRef, ptr string) (*Schema, error) {
|
||||
// if url points to a draft, return Draft.meta
|
||||
if d := findDraft(url); d != nil && d.meta != nil {
|
||||
return d.meta, nil
|
||||
}
|
||||
|
||||
b, f := split(url)
|
||||
r, err := c.findResource(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.compileRef(r, stack, ptr, r, f)
|
||||
}
|
||||
|
||||
func (c *Compiler) compileRef(r *resource, stack []schemaRef, refPtr string, res *resource, ref string) (*Schema, error) {
|
||||
base := r.baseURL(res.floc)
|
||||
ref, err := resolveURL(base, ref)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
u, f := split(ref)
|
||||
sr := r.findResource(u)
|
||||
if sr == nil {
|
||||
// external resource
|
||||
return c.compileURL(ref, stack, refPtr)
|
||||
}
|
||||
|
||||
// ensure root resource is always compiled first.
|
||||
// this is required to get schema.meta from root resource
|
||||
if r.schema == nil {
|
||||
r.schema = newSchema(r.url, r.floc, r.draft, r.doc)
|
||||
if _, err := c.compile(r, nil, schemaRef{"#", r.schema, false}, r); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
sr, err = r.resolveFragment(c, sr, f)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if sr == nil {
|
||||
return nil, fmt.Errorf("jsonschema: %s not found", ref)
|
||||
}
|
||||
|
||||
if sr.schema != nil {
|
||||
if err := checkLoop(stack, schemaRef{refPtr, sr.schema, false}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return sr.schema, nil
|
||||
}
|
||||
|
||||
sr.schema = newSchema(r.url, sr.floc, r.draft, sr.doc)
|
||||
return c.compile(r, stack, schemaRef{refPtr, sr.schema, false}, sr)
|
||||
}
|
||||
|
||||
func (c *Compiler) compileDynamicAnchors(r *resource, res *resource) error {
|
||||
if r.draft.version < 2020 {
|
||||
return nil
|
||||
}
|
||||
|
||||
rr := r.listResources(res)
|
||||
rr = append(rr, res)
|
||||
for _, sr := range rr {
|
||||
if m, ok := sr.doc.(map[string]interface{}); ok {
|
||||
if _, ok := m["$dynamicAnchor"]; ok {
|
||||
sch, err := c.compileRef(r, nil, "IGNORED", r, sr.floc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res.schema.dynamicAnchors = append(res.schema.dynamicAnchors, sch)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// compile compiles resource res (a child of root resource r) into
// res.schema. A boolean schema document becomes an always/never schema;
// any other document is compiled keyword-by-keyword by compileMap.
func (c *Compiler) compile(r *resource, stack []schemaRef, sref schemaRef, res *resource) (*Schema, error) {
	if err := c.compileDynamicAnchors(r, res); err != nil {
		return nil, err
	}

	switch v := res.doc.(type) {
	case bool:
		// "true"/"false" schema: always (or never) valid
		res.schema.Always = &v
		return res.schema, nil
	default:
		return res.schema, c.compileMap(r, stack, sref, res)
	}
}
|
||||
|
||||
// compileMap compiles a json-schema object document (res.doc, already known
// to be a map) into res.schema, keyword by keyword.
//
// Keywords are processed in groups keyed by the minimum draft version and
// the vocabulary ("validation", "applicator", "unevaluated", ...) that
// defines them; a group is skipped when the active draft/meta-schema does
// not enable that vocabulary. The processing order matters: "$schema" and
// "$ref" must be handled before the remaining keywords.
func (c *Compiler) compileMap(r *resource, stack []schemaRef, sref schemaRef, res *resource) error {
	m := res.doc.(map[string]interface{})

	if err := checkLoop(stack, sref); err != nil {
		return err
	}
	stack = append(stack, sref)

	var s = res.schema
	var err error

	// "$schema" is only honoured on the root schema; it selects the
	// meta-schema either from the known drafts or by compiling the URL.
	if r == res { // root schema
		if sch, ok := m["$schema"]; ok {
			sch := sch.(string)
			if d := findDraft(sch); d != nil {
				s.meta = d.meta
			} else {
				if s.meta, err = c.compileRef(r, stack, "$schema", res, sch); err != nil {
					return err
				}
			}
		}
	}

	if ref, ok := m["$ref"]; ok {
		s.Ref, err = c.compileRef(r, stack, "$ref", res, ref.(string))
		if err != nil {
			return err
		}
		if r.draft.version < 2019 {
			// All other properties in a "$ref" object MUST be ignored
			return nil
		}
	}

	if r.draft.version >= 2019 {
		if r == res { // root schema
			// "$vocabulary": collect required vocabularies, rejecting any
			// the draft does not support
			if vocab, ok := m["$vocabulary"]; ok {
				for url, reqd := range vocab.(map[string]interface{}) {
					if reqd, ok := reqd.(bool); ok && !reqd {
						continue
					}
					if !r.draft.isVocab(url) {
						return fmt.Errorf("jsonschema: unsupported vocab %q in %s", url, res)
					}
					s.vocab = append(s.vocab, url)
				}
			} else {
				s.vocab = r.draft.defaultVocab
			}
		}

		if ref, ok := m["$recursiveRef"]; ok {
			s.RecursiveRef, err = c.compileRef(r, stack, "$recursiveRef", res, ref.(string))
			if err != nil {
				return err
			}
		}
	}
	if r.draft.version >= 2020 {
		if dref, ok := m["$dynamicRef"]; ok {
			s.DynamicRef, err = c.compileRef(r, stack, "$dynamicRef", res, dref.(string))
			if err != nil {
				return err
			}
			if dref, ok := dref.(string); ok {
				_, frag := split(dref)
				if frag != "#" && !strings.HasPrefix(frag, "#/") {
					// frag is anchor
					s.dynamicRefAnchor = frag[1:]
				}
			}
		}
	}

	// loadInt reads a numeric keyword as an int; -1 means "not present"
	loadInt := func(pname string) int {
		if num, ok := m[pname]; ok {
			i, _ := num.(json.Number).Float64()
			return int(i)
		}
		return -1
	}

	// loadRat reads a numeric keyword as an exact rational; nil means "not present"
	loadRat := func(pname string) *big.Rat {
		if num, ok := m[pname]; ok {
			r, _ := new(big.Rat).SetString(string(num.(json.Number)))
			return r
		}
		return nil
	}

	// --- "validation" vocabulary keywords ---
	if r.draft.version < 2019 || r.schema.meta.hasVocab("validation") {
		if t, ok := m["type"]; ok {
			switch t := t.(type) {
			case string:
				s.Types = []string{t}
			case []interface{}:
				s.Types = toStrings(t)
			}
		}

		if e, ok := m["enum"]; ok {
			s.Enum = e.([]interface{})
			// precompute a friendly error message when all enum values are
			// primitives (objects/arrays would render too verbosely)
			allPrimitives := true
			for _, item := range s.Enum {
				switch jsonType(item) {
				case "object", "array":
					allPrimitives = false
					// NOTE(review): this break leaves the switch, not the
					// loop — iteration continues; harmless but redundant
					break
				}
			}
			s.enumError = "enum failed"
			if allPrimitives {
				if len(s.Enum) == 1 {
					s.enumError = fmt.Sprintf("value must be %#v", s.Enum[0])
				} else {
					strEnum := make([]string, len(s.Enum))
					for i, item := range s.Enum {
						strEnum[i] = fmt.Sprintf("%#v", item)
					}
					s.enumError = fmt.Sprintf("value must be one of %s", strings.Join(strEnum, ", "))
				}
			}
		}

		// draft-4 uses boolean exclusiveMinimum/Maximum modifying
		// minimum/maximum; later drafts use standalone numbers
		s.Minimum = loadRat("minimum")
		if exclusive, ok := m["exclusiveMinimum"]; ok {
			if exclusive, ok := exclusive.(bool); ok {
				if exclusive {
					s.Minimum, s.ExclusiveMinimum = nil, s.Minimum
				}
			} else {
				s.ExclusiveMinimum = loadRat("exclusiveMinimum")
			}
		}

		s.Maximum = loadRat("maximum")
		if exclusive, ok := m["exclusiveMaximum"]; ok {
			if exclusive, ok := exclusive.(bool); ok {
				if exclusive {
					s.Maximum, s.ExclusiveMaximum = nil, s.Maximum
				}
			} else {
				s.ExclusiveMaximum = loadRat("exclusiveMaximum")
			}
		}

		s.MultipleOf = loadRat("multipleOf")

		s.MinProperties, s.MaxProperties = loadInt("minProperties"), loadInt("maxProperties")

		if req, ok := m["required"]; ok {
			s.Required = toStrings(req.([]interface{}))
		}

		s.MinItems, s.MaxItems = loadInt("minItems"), loadInt("maxItems")

		if unique, ok := m["uniqueItems"]; ok {
			s.UniqueItems = unique.(bool)
		}

		s.MinLength, s.MaxLength = loadInt("minLength"), loadInt("maxLength")

		if pattern, ok := m["pattern"]; ok {
			// panics on invalid regex; the document is expected to have been
			// validated against the meta-schema first
			s.Pattern = regexp.MustCompile(pattern.(string))
		}

		if r.draft.version >= 2019 {
			s.MinContains, s.MaxContains = loadInt("minContains"), loadInt("maxContains")
			if s.MinContains == -1 {
				s.MinContains = 1
			}

			if deps, ok := m["dependentRequired"]; ok {
				deps := deps.(map[string]interface{})
				s.DependentRequired = make(map[string][]string, len(deps))
				for pname, pvalue := range deps {
					s.DependentRequired[pname] = toStrings(pvalue.([]interface{}))
				}
			}
		}
	}

	// compile compiles the subschema at ptr (relative to res) into a *Schema
	compile := func(stack []schemaRef, ptr string) (*Schema, error) {
		return c.compileRef(r, stack, ptr, res, r.url+res.floc+"/"+ptr)
	}

	// loadSchema compiles keyword pname when present; (nil, nil) when absent
	loadSchema := func(pname string, stack []schemaRef) (*Schema, error) {
		if _, ok := m[pname]; ok {
			return compile(stack, escape(pname))
		}
		return nil, nil
	}

	// loadSchemas compiles an array-of-schemas keyword when present
	loadSchemas := func(pname string, stack []schemaRef) ([]*Schema, error) {
		if pvalue, ok := m[pname]; ok {
			pvalue := pvalue.([]interface{})
			schemas := make([]*Schema, len(pvalue))
			for i := range pvalue {
				sch, err := compile(stack, escape(pname)+"/"+strconv.Itoa(i))
				if err != nil {
					return nil, err
				}
				schemas[i] = sch
			}
			return schemas, nil
		}
		return nil, nil
	}

	// --- "applicator" vocabulary keywords ---
	// Note: subschemas applied to the SAME instance value (not/allOf/if/...)
	// pass the current stack for loop detection; subschemas applied to child
	// values (properties/items/...) pass nil.
	if r.draft.version < 2019 || r.schema.meta.hasVocab("applicator") {
		if s.Not, err = loadSchema("not", stack); err != nil {
			return err
		}
		if s.AllOf, err = loadSchemas("allOf", stack); err != nil {
			return err
		}
		if s.AnyOf, err = loadSchemas("anyOf", stack); err != nil {
			return err
		}
		if s.OneOf, err = loadSchemas("oneOf", stack); err != nil {
			return err
		}

		if props, ok := m["properties"]; ok {
			props := props.(map[string]interface{})
			s.Properties = make(map[string]*Schema, len(props))
			for pname := range props {
				s.Properties[pname], err = compile(nil, "properties/"+escape(pname))
				if err != nil {
					return err
				}
			}
		}

		if regexProps, ok := m["regexProperties"]; ok {
			s.RegexProperties = regexProps.(bool)
		}

		if patternProps, ok := m["patternProperties"]; ok {
			patternProps := patternProps.(map[string]interface{})
			s.PatternProperties = make(map[*regexp.Regexp]*Schema, len(patternProps))
			for pattern := range patternProps {
				s.PatternProperties[regexp.MustCompile(pattern)], err = compile(nil, "patternProperties/"+escape(pattern))
				if err != nil {
					return err
				}
			}
		}

		if additionalProps, ok := m["additionalProperties"]; ok {
			// either a boolean switch or a schema applied to extra properties
			switch additionalProps := additionalProps.(type) {
			case bool:
				s.AdditionalProperties = additionalProps
			case map[string]interface{}:
				s.AdditionalProperties, err = compile(nil, "additionalProperties")
				if err != nil {
					return err
				}
			}
		}

		if deps, ok := m["dependencies"]; ok {
			// each value is either a list of required property names or a schema
			deps := deps.(map[string]interface{})
			s.Dependencies = make(map[string]interface{}, len(deps))
			for pname, pvalue := range deps {
				switch pvalue := pvalue.(type) {
				case []interface{}:
					s.Dependencies[pname] = toStrings(pvalue)
				default:
					s.Dependencies[pname], err = compile(stack, "dependencies/"+escape(pname))
					if err != nil {
						return err
					}
				}
			}
		}

		if r.draft.version >= 6 {
			if s.PropertyNames, err = loadSchema("propertyNames", nil); err != nil {
				return err
			}
			if s.Contains, err = loadSchema("contains", nil); err != nil {
				return err
			}
		}

		if r.draft.version >= 7 {
			// then/else are only meaningful when "if" is present
			if m["if"] != nil {
				if s.If, err = loadSchema("if", stack); err != nil {
					return err
				}
				if s.Then, err = loadSchema("then", stack); err != nil {
					return err
				}
				if s.Else, err = loadSchema("else", stack); err != nil {
					return err
				}
			}
		}
		if r.draft.version >= 2019 {
			if deps, ok := m["dependentSchemas"]; ok {
				deps := deps.(map[string]interface{})
				s.DependentSchemas = make(map[string]*Schema, len(deps))
				for pname := range deps {
					s.DependentSchemas[pname], err = compile(stack, "dependentSchemas/"+escape(pname))
					if err != nil {
						return err
					}
				}
			}
		}

		if r.draft.version >= 2020 {
			// 2020-12 renamed tuple validation: prefixItems + single-schema items
			if s.PrefixItems, err = loadSchemas("prefixItems", nil); err != nil {
				return err
			}
			if s.Items2020, err = loadSchema("items", nil); err != nil {
				return err
			}
		} else {
			// pre-2020: "items" is either a tuple of schemas (with optional
			// additionalItems) or a single schema for all items
			if items, ok := m["items"]; ok {
				switch items.(type) {
				case []interface{}:
					s.Items, err = loadSchemas("items", nil)
					if err != nil {
						return err
					}
					if additionalItems, ok := m["additionalItems"]; ok {
						switch additionalItems := additionalItems.(type) {
						case bool:
							s.AdditionalItems = additionalItems
						case map[string]interface{}:
							s.AdditionalItems, err = compile(nil, "additionalItems")
							if err != nil {
								return err
							}
						}
					}
				default:
					s.Items, err = compile(nil, "items")
					if err != nil {
						return err
					}
				}
			}
		}

	}

	// unevaluatedXXX keywords were in "applicator" vocab in 2019, but moved to new vocab "unevaluated" in 2020
	if (r.draft.version == 2019 && r.schema.meta.hasVocab("applicator")) || (r.draft.version >= 2020 && r.schema.meta.hasVocab("unevaluated")) {
		if s.UnevaluatedProperties, err = loadSchema("unevaluatedProperties", nil); err != nil {
			return err
		}
		if s.UnevaluatedItems, err = loadSchema("unevaluatedItems", nil); err != nil {
			return err
		}
		if r.draft.version >= 2020 {
			// any item in an array that passes validation of the contains schema is considered "evaluated"
			s.ContainsEval = true
		}
	}

	if format, ok := m["format"]; ok {
		s.Format = format.(string)
		// format is assertive pre-2019, or when explicitly enabled, or when
		// the meta-schema requires the format-assertion vocabulary
		if r.draft.version < 2019 || c.AssertFormat || r.schema.meta.hasVocab("format-assertion") {
			if format, ok := c.Formats[s.Format]; ok {
				s.format = format
			} else {
				s.format, _ = Formats[s.Format]
			}
		}
	}

	if c.ExtractAnnotations {
		if title, ok := m["title"]; ok {
			s.Title = title.(string)
		}
		if description, ok := m["description"]; ok {
			s.Description = description.(string)
		}
		s.Default = m["default"]
	}

	if r.draft.version >= 6 {
		if c, ok := m["const"]; ok {
			s.Constant = []interface{}{c}
		}
	}

	if r.draft.version >= 7 {
		// content keywords: compiler-registered decoders/media types take
		// precedence over the package-level registries
		if encoding, ok := m["contentEncoding"]; ok {
			s.ContentEncoding = encoding.(string)
			if decoder, ok := c.Decoders[s.ContentEncoding]; ok {
				s.decoder = decoder
			} else {
				s.decoder, _ = Decoders[s.ContentEncoding]
			}
		}
		if mediaType, ok := m["contentMediaType"]; ok {
			s.ContentMediaType = mediaType.(string)
			if mediaType, ok := c.MediaTypes[s.ContentMediaType]; ok {
				s.mediaType = mediaType
			} else {
				s.mediaType, _ = MediaTypes[s.ContentMediaType]
			}
			if s.ContentSchema, err = loadSchema("contentSchema", stack); err != nil {
				return err
			}
		}
		if c.ExtractAnnotations {
			if comment, ok := m["$comment"]; ok {
				s.Comment = comment.(string)
			}
			if readOnly, ok := m["readOnly"]; ok {
				s.ReadOnly = readOnly.(bool)
			}
			if writeOnly, ok := m["writeOnly"]; ok {
				s.WriteOnly = writeOnly.(bool)
			}
			if examples, ok := m["examples"]; ok {
				s.Examples = examples.([]interface{})
			}
		}
	}

	if r.draft.version >= 2019 {
		// from 2019-09 on, content keywords are annotation-only unless
		// AssertContent is enabled
		if !c.AssertContent {
			s.decoder = nil
			s.mediaType = nil
			s.ContentSchema = nil
		}
		if c.ExtractAnnotations {
			if deprecated, ok := m["deprecated"]; ok {
				s.Deprecated = deprecated.(bool)
			}
		}
	}

	// give every registered extension a chance to compile its keywords
	for name, ext := range c.extensions {
		es, err := ext.compiler.Compile(CompilerContext{c, r, stack, res}, m)
		if err != nil {
			return err
		}
		if es != nil {
			if s.Extensions == nil {
				s.Extensions = make(map[string]ExtSchema)
			}
			s.Extensions[name] = es
		}
	}

	return nil
}
|
||||
|
||||
// validateSchema validates the raw schema document v (located at vloc)
// against the draft's meta-schema and against the meta-schema of every
// registered extension; a nil meta-schema is skipped.
func (c *Compiler) validateSchema(r *resource, v interface{}, vloc string) error {
	validate := func(meta *Schema) error {
		if meta == nil {
			return nil
		}
		return meta.validateValue(v, vloc)
	}

	if err := validate(r.draft.meta); err != nil {
		return err
	}
	for _, ext := range c.extensions {
		if err := validate(ext.meta); err != nil {
			return err
		}
	}
	return nil
}
|
||||
|
||||
// toStrings converts a slice of interface{} values — each of which must
// hold a string — into the corresponding []string. A non-string element
// panics, exactly as a failed type assertion would.
func toStrings(arr []interface{}) []string {
	out := make([]string, 0, len(arr))
	for _, item := range arr {
		out = append(out, item.(string))
	}
	return out
}
|
||||
|
||||
// SchemaRef captures schema and the path referring to it.
// A stack of schemaRefs records which schemas are currently being applied
// to the same instance value, enabling infinite-loop detection.
type schemaRef struct {
	path    string  // relative-json-pointer to schema
	schema  *Schema // target schema
	discard bool    // true when scope left
}
|
||||
|
||||
// String renders the ref as "(path)schema" for debugging output.
func (sr schemaRef) String() string {
	return fmt.Sprintf("(%s)%v", sr.path, sr.schema)
}
|
||||
|
||||
// checkLoop returns an InfiniteLoopError if sref's target schema already
// appears anywhere on stack — i.e. applying it again to the same instance
// value would recurse forever. Schemas are compared by pointer identity.
func checkLoop(stack []schemaRef, sref schemaRef) error {
	for _, ref := range stack {
		if ref.schema == sref.schema {
			return infiniteLoopError(stack, sref)
		}
	}
	return nil
}
|
||||
|
||||
func keywordLocation(stack []schemaRef, path string) string {
|
||||
var loc string
|
||||
for _, ref := range stack[1:] {
|
||||
loc += "/" + ref.path
|
||||
}
|
||||
if path != "" {
|
||||
loc = loc + "/" + path
|
||||
}
|
||||
return loc
|
||||
}
|
||||
29
vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
generated
vendored
29
vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
generated
vendored
|
|
@ -1,29 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// Decoders is a registry of functions, which know how to decode
// string encoded in specific format.
//
// New Decoders can be registered by adding to this map. Key is encoding name,
// value is function that knows how to decode string in that format.
//
// Only "base64" is registered by default; it decodes RFC 4648 standard
// (padded) base64 via the encoding/base64 package.
var Decoders = map[string]func(string) ([]byte, error){
	"base64": base64.StdEncoding.DecodeString,
}
|
||||
|
||||
// MediaTypes is a registry of functions, which know how to validate
// whether the bytes represent data of that mediaType.
//
// New mediaTypes can be registered by adding to this map. Key is mediaType name,
// value is function that knows how to validate that mediaType.
//
// Only "application/json" is registered by default.
var MediaTypes = map[string]func([]byte) error{
	"application/json": validateJSON,
}
|
||||
|
||||
// validateJSON reports whether b holds well-formed JSON: a nil return
// means the bytes unmarshalled successfully; otherwise the json package's
// parse error is returned.
func validateJSON(b []byte) error {
	var doc interface{}
	if err := json.Unmarshal(b, &doc); err != nil {
		return err
	}
	return nil
}
|
||||
49
vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
generated
vendored
49
vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
generated
vendored
|
|
@ -1,49 +0,0 @@
|
|||
/*
|
||||
Package jsonschema provides json-schema compilation and validation.
|
||||
|
||||
Features:
|
||||
- implements draft 2020-12, 2019-09, draft-7, draft-6, draft-4
|
||||
- fully compliant with JSON-Schema-Test-Suite, (excluding some optional)
|
||||
- list of optional tests that are excluded can be found in schema_test.go(variable skipTests)
|
||||
- validates schemas against meta-schema
|
||||
- full support of remote references
|
||||
- support of recursive references between schemas
|
||||
- detects infinite loop in schemas
|
||||
- thread safe validation
|
||||
- rich, intuitive hierarchial error messages with json-pointers to exact location
|
||||
- supports output formats flag, basic and detailed
|
||||
- supports enabling format and content Assertions in draft2019-09 or above
|
||||
- change Compiler.AssertFormat, Compiler.AssertContent to true
|
||||
- compiled schema can be introspected. easier to develop tools like generating go structs given schema
|
||||
- supports user-defined keywords via extensions
|
||||
- implements following formats (supports user-defined)
|
||||
- date-time, date, time, duration (supports leap-second)
|
||||
- uuid, hostname, email
|
||||
- ip-address, ipv4, ipv6
|
||||
- uri, uriref, uri-template(limited validation)
|
||||
- json-pointer, relative-json-pointer
|
||||
- regex, format
|
||||
- implements following contentEncoding (supports user-defined)
|
||||
- base64
|
||||
- implements following contentMediaType (supports user-defined)
|
||||
- application/json
|
||||
- can load from files/http/https/string/[]byte/io.Reader (supports user-defined)
|
||||
|
||||
The schema is compiled against the version specified in "$schema" property.
|
||||
If "$schema" property is missing, it uses latest draft which currently implemented
|
||||
by this library.
|
||||
|
||||
You can force to use specific draft, when "$schema" is missing, as follows:
|
||||
|
||||
compiler := jsonschema.NewCompiler()
|
||||
compiler.Draft = jsonschema.Draft4
|
||||
|
||||
This package supports loading json-schema from filePath and fileURL.
|
||||
|
||||
To load json-schema from HTTPURL, add following import:
|
||||
|
||||
import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
|
||||
you can validate yaml documents. see https://play.golang.org/p/sJy1qY7dXgA
|
||||
*/
|
||||
package jsonschema
|
||||
1454
vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
generated
vendored
1454
vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
generated
vendored
File diff suppressed because it is too large
Load diff
129
vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
generated
vendored
129
vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
generated
vendored
|
|
@ -1,129 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// InvalidJSONTypeError is the error type returned by ValidateInterface.
|
||||
// this tells that specified go object is not valid jsonType.
|
||||
type InvalidJSONTypeError string
|
||||
|
||||
func (e InvalidJSONTypeError) Error() string {
|
||||
return fmt.Sprintf("jsonschema: invalid jsonType: %s", string(e))
|
||||
}
|
||||
|
||||
// InfiniteLoopError is returned by Compile/Validate.
// this gives url#keywordLocation that lead to infinity loop.
type InfiniteLoopError string

// Error prefixes the looping location with "jsonschema: infinite loop".
func (e InfiniteLoopError) Error() string {
	return "jsonschema: infinite loop " + string(e)
}
|
||||
|
||||
// infiniteLoopError builds an InfiniteLoopError from the chain of schema
// refs on stack followed by sref — the ref whose application would close
// the loop. The path starts at the first schema's absolute location and
// appends each subsequent relative ref path.
func infiniteLoopError(stack []schemaRef, sref schemaRef) InfiniteLoopError {
	var path string
	for _, ref := range stack {
		if path == "" {
			path += ref.schema.Location
		} else {
			path += "/" + ref.path
		}
	}
	return InfiniteLoopError(path + "/" + sref.path)
}
|
||||
|
||||
// SchemaError is the error type returned by Compile.
type SchemaError struct {
	// SchemaURL is the url to json-schema that failed to compile.
	// This is helpful, if your schema refers to external schemas
	SchemaURL string

	// Err is the error that occurred during compilation.
	// It could be ValidationError, because compilation validates
	// given schema against the json meta-schema
	Err error
}
|
||||
|
||||
// Unwrap returns the underlying compilation error, so callers can use
// errors.Is/errors.As on a SchemaError.
func (se *SchemaError) Unwrap() error {
	return se.Err
}
|
||||
|
||||
// Error returns "jsonschema <url> compilation failed", appending the
// underlying error — with any duplicate "jsonschema: " prefix stripped —
// when one is present.
func (se *SchemaError) Error() string {
	s := fmt.Sprintf("jsonschema %s compilation failed", se.SchemaURL)
	if se.Err != nil {
		return fmt.Sprintf("%s: %v", s, strings.TrimPrefix(se.Err.Error(), "jsonschema: "))
	}
	return s
}
|
||||
|
||||
// GoString renders the error for %#v; when the cause is a
// *ValidationError, its hierarchical %#v form is included in full,
// otherwise the one-line Error() string is used.
func (se *SchemaError) GoString() string {
	if _, ok := se.Err.(*ValidationError); ok {
		return fmt.Sprintf("jsonschema %s compilation failed\n%#v", se.SchemaURL, se.Err)
	}
	return se.Error()
}
|
||||
|
||||
// ValidationError is the error type returned by Validate.
// Errors form a tree: each node describes one failing keyword and Causes
// holds the nested failures that produced it.
type ValidationError struct {
	KeywordLocation         string             // validation path of validating keyword or schema
	AbsoluteKeywordLocation string             // absolute location of validating keyword or schema
	InstanceLocation        string             // location of the json value within the instance being validated
	Message                 string             // describes error
	Causes                  []*ValidationError // nested validation errors
}
|
||||
|
||||
// add appends causes to ve and returns ve. Each cause must be a
// *ValidationError; anything else panics on the type assertion.
func (ve *ValidationError) add(causes ...error) error {
	for _, cause := range causes {
		ve.Causes = append(ve.Causes, cause.(*ValidationError))
	}
	return ve
}
|
||||
|
||||
// causes attaches err as the cause(s) of ve: an anonymous error (one with
// an empty Message) contributes its own causes directly, flattening one
// level; otherwise err itself becomes a single cause.
func (ve *ValidationError) causes(err error) error {
	if err := err.(*ValidationError); err.Message == "" {
		ve.Causes = err.Causes
	} else {
		ve.add(err)
	}
	return ve
}
|
||||
|
||||
// Error summarizes the deepest first-child leaf cause as one line:
// "jsonschema: '<instance>' does not validate with <url>#<keyword>: <msg>".
// The schema url comes from ve itself; the locations and message come
// from the leaf.
func (ve *ValidationError) Error() string {
	leaf := ve
	for len(leaf.Causes) > 0 {
		leaf = leaf.Causes[0]
	}
	u, _ := split(ve.AbsoluteKeywordLocation)
	return fmt.Sprintf("jsonschema: %s does not validate with %s: %s", quote(leaf.InstanceLocation), u+"#"+leaf.KeywordLocation, leaf.Message)
}
|
||||
|
||||
// GoString renders the full error hierarchy for %#v: one
// "[I#instance] [S#schema] message" line per error, with each cause's
// rendering indented beneath its parent.
func (ve *ValidationError) GoString() string {
	sloc := ve.AbsoluteKeywordLocation
	sloc = sloc[strings.IndexByte(sloc, '#')+1:]
	msg := fmt.Sprintf("[I#%s] [S#%s] %s", ve.InstanceLocation, sloc, ve.Message)
	for _, c := range ve.Causes {
		for _, line := range strings.Split(c.GoString(), "\n") {
			msg += "\n " + line
		}
	}
	return msg
}
|
||||
|
||||
// joinPtr joins two json-pointer fragments with "/", returning the
// non-empty operand unchanged when the other is empty.
func joinPtr(ptr1, ptr2 string) string {
	switch {
	case ptr1 == "":
		return ptr2
	case ptr2 == "":
		return ptr1
	default:
		return ptr1 + "/" + ptr2
	}
}
|
||||
|
||||
// quote returns s wrapped in single quotes: the string is Go-quoted for
// escaping, then embedded double quotes are un-escaped and embedded
// single quotes escaped, matching the library's error-message style.
func quote(s string) string {
	q := fmt.Sprintf("%q", s)
	q = q[1 : len(q)-1] // drop the surrounding double quotes
	q = strings.NewReplacer(`\"`, `"`, `'`, `\'`).Replace(q)
	return "'" + q + "'"
}
|
||||
116
vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
generated
vendored
116
vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
generated
vendored
|
|
@ -1,116 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
// ExtCompiler compiles custom keyword(s) into ExtSchema.
type ExtCompiler interface {
	// Compile compiles the custom keywords in schema m and returns its compiled representation.
	// if the schema m does not contain the keywords defined by this extension,
	// compiled representation nil should be returned.
	Compile(ctx CompilerContext, m map[string]interface{}) (ExtSchema, error)
}

// ExtSchema is schema representation of custom keyword(s)
type ExtSchema interface {
	// Validate validates the json value v with this ExtSchema.
	// Returned error must be *ValidationError.
	Validate(ctx ValidationContext, v interface{}) error
}

// extension pairs a registered ExtCompiler with the meta-schema used to
// validate its keywords before compilation (meta may be nil).
type extension struct {
	meta     *Schema
	compiler ExtCompiler
}
|
||||
|
||||
// RegisterExtension registers custom keyword(s) into this compiler.
//
// name is extension name, used only to avoid name collisions.
// meta captures the metaschema for the new keywords.
// This is used to validate the schema before calling ext.Compile.
// Registering the same name twice replaces the earlier extension.
func (c *Compiler) RegisterExtension(name string, meta *Schema, ext ExtCompiler) {
	c.extensions[name] = extension{meta, ext}
}
|
||||
|
||||
// CompilerContext ---

// CompilerContext provides additional context required in compiling for extension.
type CompilerContext struct {
	c     *Compiler   // compiler performing the current compilation
	r     *resource   // root resource being compiled
	stack []schemaRef // refs currently applied to the same instance (loop detection)
	res   *resource   // resource containing the extension keywords
}
|
||||
|
||||
// Compile compiles given value at ptr into *Schema. This is useful in implementing
// keyword like allOf/not/patternProperties.
//
// schPath is the relative-json-pointer to the schema to be compiled from parent schema.
//
// applicableOnSameInstance tells whether current schema and the given schema
// are applied on same instance value. this is used to detect infinite loop in schema.
func (ctx CompilerContext) Compile(schPath string, applicableOnSameInstance bool) (*Schema, error) {
	var stack []schemaRef
	if applicableOnSameInstance {
		// reuse the current ref stack so loops through this subschema are detected
		stack = ctx.stack
	}
	return ctx.c.compileRef(ctx.r, stack, schPath, ctx.res, ctx.r.url+ctx.res.floc+"/"+schPath)
}
|
||||
|
||||
// CompileRef compiles the schema referenced by ref uri
//
// refPath is the relative-json-pointer to ref.
//
// applicableOnSameInstance tells whether current schema and the given schema
// are applied on same instance value. this is used to detect infinite loop in schema.
func (ctx CompilerContext) CompileRef(ref string, refPath string, applicableOnSameInstance bool) (*Schema, error) {
	var stack []schemaRef
	if applicableOnSameInstance {
		// reuse the current ref stack so loops through the target are detected
		stack = ctx.stack
	}
	return ctx.c.compileRef(ctx.r, stack, refPath, ctx.res, ref)
}
|
||||
|
||||
// ValidationContext ---

// ValidationContext provides additional context required in validating for extension.
// The function fields are closures supplied by the validator for the
// current validation pass.
type ValidationContext struct {
	result          validationResult
	validate        func(sch *Schema, schPath string, v interface{}, vpath string) error
	validateInplace func(sch *Schema, schPath string) error
	validationError func(keywordPath string, format string, a ...interface{}) *ValidationError
}
|
||||
|
||||
// EvaluatedProp marks given property of object as evaluated.
// Evaluated properties are exempt from "unevaluatedProperties" checks.
func (ctx ValidationContext) EvaluatedProp(prop string) {
	delete(ctx.result.unevalProps, prop)
}

// EvaluatedItem marks given index of array as evaluated.
// Evaluated items are exempt from "unevaluatedItems" checks.
func (ctx ValidationContext) EvaluatedItem(index int) {
	delete(ctx.result.unevalItems, index)
}
|
||||
|
||||
// Validate validates schema s with value v. Extension must use this method instead of
// *Schema.ValidateInterface method. This will be useful in implementing keywords like
// allOf/oneOf
//
// spath is relative-json-pointer to s
// vpath is relative-json-pointer to v.
func (ctx ValidationContext) Validate(s *Schema, spath string, v interface{}, vpath string) error {
	if vpath == "" {
		// empty vpath means v is the value currently being validated:
		// validate in place so evaluation results are shared
		return ctx.validateInplace(s, spath)
	}
	return ctx.validate(s, spath, v, vpath)
}
|
||||
|
||||
// Error used to construct validation error by extensions.
//
// keywordPath is relative-json-pointer to keyword.
// format and a are passed through to the validator's error formatter.
func (ctx ValidationContext) Error(keywordPath string, format string, a ...interface{}) *ValidationError {
	return ctx.validationError(keywordPath, format, a...)
}
|
||||
|
||||
// Group is used by extensions to group multiple errors as causes to parent error.
// This is useful in implementing keywords like allOf where each schema specified
// in allOf can result a validationError.
//
// The value receiver is unused; it only namespaces the helper on ValidationError.
func (ValidationError) Group(parent *ValidationError, causes ...error) error {
	return parent.add(causes...)
}
|
||||
567
vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
generated
vendored
567
vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
generated
vendored
|
|
@ -1,567 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net"
|
||||
"net/mail"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Formats is a registry of functions, which know how to validate
// a specific format.
//
// New Formats can be registered by adding to this map. Key is format name,
// value is function that knows how to validate that format.
//
// Checkers receive the raw json value; the checkers defined in this file
// (e.g. isDateTime, isDate) return true for non-string values, leaving
// type constraints to the "type" keyword.
var Formats = map[string]func(interface{}) bool{
	"date-time":             isDateTime,
	"date":                  isDate,
	"time":                  isTime,
	"duration":              isDuration,
	"period":                isPeriod,
	"hostname":              isHostname,
	"email":                 isEmail,
	"ip-address":            isIPV4,
	"ipv4":                  isIPV4,
	"ipv6":                  isIPV6,
	"uri":                   isURI,
	"iri":                   isURI,
	"uri-reference":         isURIReference,
	"uriref":                isURIReference,
	"iri-reference":         isURIReference,
	"uri-template":          isURITemplate,
	"regex":                 isRegex,
	"json-pointer":          isJSONPointer,
	"relative-json-pointer": isRelativeJSONPointer,
	"uuid":                  isUUID,
}
|
||||
|
||||
// isDateTime tells whether given string is a valid date representation
// as defined by RFC 3339, section 5.6.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
//
// Non-string values are treated as valid: the format only constrains strings.
func isDateTime(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true
	}
	if len(s) < 20 { // yyyy-mm-ddThh:mm:ssZ
		return false
	}
	if s[10] != 'T' && s[10] != 't' {
		return false
	}
	// split into the full-date and full-time productions and check each
	return isDate(s[:10]) && isTime(s[11:])
}
|
||||
|
||||
// isDate tells whether given string is a valid full-date production
// as defined by RFC 3339, section 5.6.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
//
// Non-string values are treated as valid: the format only constrains strings.
func isDate(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true
	}
	// time.Parse with the "2006-01-02" layout enforces the calendar,
	// including month lengths and leap days
	if _, err := time.Parse("2006-01-02", str); err != nil {
		return false
	}
	return true
}
|
||||
|
||||
// isTime tells whether given string is a valid full-time production
// as defined by RFC 3339, section 5.6.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
func isTime(v interface{}) bool {
	str, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}

	// golang time package does not support leap seconds.
	// so we are parsing it manually here.

	// hh:mm:ss
	// 01234567
	if len(str) < 9 || str[2] != ':' || str[5] != ':' {
		return false
	}
	// isInRange parses str as a decimal integer and checks min <= n <= max.
	isInRange := func(str string, min, max int) (int, bool) {
		n, err := strconv.Atoi(str)
		if err != nil {
			return 0, false
		}
		if n < min || n > max {
			return 0, false
		}
		return n, true
	}
	var h, m, s int
	if h, ok = isInRange(str[0:2], 0, 23); !ok {
		return false
	}
	if m, ok = isInRange(str[3:5], 0, 59); !ok {
		return false
	}
	// 60 is admitted here for leap seconds; validated further below
	if s, ok = isInRange(str[6:8], 0, 60); !ok {
		return false
	}
	str = str[8:]

	// parse secfrac if present
	if str[0] == '.' {
		// dot following more than one digit
		str = str[1:]
		var numDigits int
		for str != "" {
			if str[0] < '0' || str[0] > '9' {
				break
			}
			numDigits++
			str = str[1:]
		}
		if numDigits == 0 {
			return false
		}
	}

	// a timezone designator ('Z' or numeric offset) is mandatory
	if len(str) == 0 {
		return false
	}

	if str[0] == 'z' || str[0] == 'Z' {
		if len(str) != 1 {
			return false
		}
	} else {
		// time-numoffset
		// +hh:mm
		// 012345
		if len(str) != 6 || str[3] != ':' {
			return false
		}

		// sign is inverted because the offset is subtracted below
		// to normalize the wall-clock time back to UTC
		var sign int
		if str[0] == '+' {
			sign = -1
		} else if str[0] == '-' {
			sign = +1
		} else {
			return false
		}

		var zh, zm int
		if zh, ok = isInRange(str[1:3], 0, 23); !ok {
			return false
		}
		if zm, ok = isInRange(str[4:6], 0, 59); !ok {
			return false
		}

		// apply timezone offset
		hm := (h*60 + m) + sign*(zh*60+zm)
		if hm < 0 {
			hm += 24 * 60
		}
		h, m = hm/60, hm%60
	}

	// check leapsecond: only valid at 23:59:60 UTC (after normalization)
	if s == 60 { // leap second
		if h != 23 || m != 59 {
			return false
		}
	}

	return true
}
|
||||
|
||||
// isDuration tells whether given string is a valid duration format
// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
//
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
func isDuration(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	// a duration always starts with 'P'
	if len(s) == 0 || s[0] != 'P' {
		return false
	}
	s = s[1:]
	// parseUnits consumes "<digits><unit-letter>" pairs from s (which it
	// captures and advances) until 'T' or end of input, returning the
	// concatenated unit letters, e.g. "1Y2M" -> "YM".
	parseUnits := func() (units string, ok bool) {
		for len(s) > 0 && s[0] != 'T' {
			digits := false
			for {
				if len(s) == 0 {
					break
				}
				if s[0] < '0' || s[0] > '9' {
					break
				}
				digits = true
				s = s[1:]
			}
			// every unit letter must be preceded by at least one digit
			if !digits || len(s) == 0 {
				return units, false
			}
			units += s[:1]
			s = s[1:]
		}
		return units, true
	}
	units, ok := parseUnits()
	if !ok {
		return false
	}
	if units == "W" {
		return len(s) == 0 // P_W: weeks cannot be combined with other units
	}
	if len(units) > 0 {
		// date units must be a contiguous, ordered run of "YMD"; the
		// substring check enforces both order and adjacency per the ABNF
		// (e.g. "YD" is rejected because dur-day may only follow dur-month)
		if strings.Index("YMD", units) == -1 {
			return false
		}
		if len(s) == 0 {
			return true // "P" dur-date
		}
	}
	// any remaining input must be a time component introduced by 'T'
	if len(s) == 0 || s[0] != 'T' {
		return false
	}
	s = s[1:]
	units, ok = parseUnits()
	// time units must be a non-empty contiguous run of "HMS"
	return ok && len(s) == 0 && len(units) > 0 && strings.Index("HMS", units) != -1
}
|
||||
|
||||
// isPeriod tells whether given string is a valid period format
|
||||
// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
|
||||
func isPeriod(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
slash := strings.IndexByte(s, '/')
|
||||
if slash == -1 {
|
||||
return false
|
||||
}
|
||||
start, end := s[:slash], s[slash+1:]
|
||||
if isDateTime(start) {
|
||||
return isDateTime(end) || isDuration(end)
|
||||
}
|
||||
return isDuration(start) && isDateTime(end)
|
||||
}
|
||||
|
||||
// isHostname tells whether given string is a valid representation
// for an Internet host name, as defined by RFC 1034 section 3.1 and
// RFC 1123 section 2.1.
//
// See https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names, for details.
func isHostname(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	// entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
	s = strings.TrimSuffix(s, ".")
	if len(s) > 253 {
		return false
	}

	// Hostnames are composed of series of labels concatenated with dots, as are all domain names
	for _, label := range strings.Split(s, ".") {
		// Each label must be from 1 to 63 characters long
		if labelLen := len(label); labelLen < 1 || labelLen > 63 {
			return false
		}

		// labels must not start with a hyphen
		// RFC 1123 section 2.1: restriction on the first character
		// is relaxed to allow either a letter or a digit
		//
		// BUG FIX: the original checked s[0] (first byte of the whole
		// hostname), so inner labels starting with '-' (e.g. "a.-b.com")
		// were wrongly accepted. Check the label itself.
		if label[0] == '-' {
			return false
		}

		// must not end with a hyphen
		if label[len(label)-1] == '-' {
			return false
		}

		// labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
		// the digits '0' through '9', and the hyphen ('-')
		for _, c := range label {
			if valid := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-'); !valid {
				return false
			}
		}
	}

	return true
}
|
||||
|
||||
// isEmail tells whether given string is a valid Internet email address
// as defined by RFC 5322, section 3.4.1.
//
// See https://en.wikipedia.org/wiki/Email_address, for details.
func isEmail(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	// entire email address to be no more than 254 characters long
	if len(s) > 254 {
		return false
	}

	// email address is generally recognized as having two parts joined with an at-sign
	// (LastIndexByte: the local part may itself contain quoted '@' characters)
	at := strings.LastIndexByte(s, '@')
	if at == -1 {
		return false
	}
	local := s[0:at]
	domain := s[at+1:]

	// local part may be up to 64 characters long
	if len(local) > 64 {
		return false
	}

	// domain if enclosed in brackets, must match an IP address
	if len(domain) >= 2 && domain[0] == '[' && domain[len(domain)-1] == ']' {
		ip := domain[1 : len(domain)-1]
		if strings.HasPrefix(ip, "IPv6:") {
			return isIPV6(strings.TrimPrefix(ip, "IPv6:"))
		}
		return isIPV4(ip)
	}

	// domain must match the requirements for a hostname
	if !isHostname(domain) {
		return false
	}

	// finally defer to net/mail for full RFC 5322 address syntax
	_, err := mail.ParseAddress(s)
	return err == nil
}
|
||||
|
||||
// isIPV4 tells whether given string is a valid representation of an IPv4 address
// according to the "dotted-quad" ABNF syntax as defined in RFC 2673, section 3.2.
func isIPV4(v interface{}) bool {
	s, ok := v.(string)
	if !ok {
		return true // format applies to strings only
	}
	groups := strings.Split(s, ".")
	if len(groups) != 4 {
		return false
	}
	for _, group := range groups {
		// each octet is 1-3 decimal digits; parsing bytes directly also
		// rejects signed octets that strconv.Atoi would have tolerated
		// (BUG FIX: "+1.2.3.4" previously validated)
		if len(group) < 1 || len(group) > 3 {
			return false
		}
		n := 0
		for i := 0; i < len(group); i++ {
			c := group[i]
			if c < '0' || c > '9' {
				return false
			}
			n = n*10 + int(c-'0')
		}
		if n > 255 {
			return false
		}
		// leading zeroes should be rejected, as they are treated as octals
		// (BUG FIX: this now also covers "00"/"000", which the original
		// n != 0 guard let through)
		if len(group) > 1 && group[0] == '0' {
			return false
		}
	}
	return true
}
|
||||
|
||||
// isIPV6 tells whether given string is a valid representation of an IPv6 address
// as defined in RFC 2373, section 2.2.
func isIPV6(v interface{}) bool {
	str, isStr := v.(string)
	if !isStr {
		return true // format applies to strings only
	}
	// require at least one colon so that IPv4 dotted-quads are rejected
	// (net.ParseIP accepts both families)
	if strings.IndexByte(str, ':') < 0 {
		return false
	}
	return net.ParseIP(str) != nil
}
|
||||
|
||||
// isURI tells whether given string is valid URI, according to RFC 3986.
|
||||
func isURI(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
u, err := urlParse(s)
|
||||
return err == nil && u.IsAbs()
|
||||
}
|
||||
|
||||
// urlParse parses s with net/url and additionally validates any IPv6
// host: a literal IPv6 address must be enclosed in brackets and must be
// a syntactically valid address (net/url alone does not enforce the
// latter).
func urlParse(s string) (*url.URL, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, err
	}

	// if hostname is ipv6, validate it
	// (Hostname() strips the brackets, so a ':' means an IPv6 literal)
	hostname := u.Hostname()
	if strings.IndexByte(hostname, ':') != -1 {
		if strings.IndexByte(u.Host, '[') == -1 || strings.IndexByte(u.Host, ']') == -1 {
			return nil, errors.New("ipv6 address is not enclosed in brackets")
		}
		if !isIPV6(hostname) {
			return nil, errors.New("invalid ipv6 address")
		}
	}
	return u, nil
}
|
||||
|
||||
// isURIReference tells whether given string is a valid URI Reference
|
||||
// (either a URI or a relative-reference), according to RFC 3986.
|
||||
func isURIReference(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
_, err := urlParse(s)
|
||||
return err == nil && !strings.Contains(s, `\`)
|
||||
}
|
||||
|
||||
// isURITemplate tells whether given string is a valid URI Template
|
||||
// according to RFC6570.
|
||||
//
|
||||
// Current implementation does minimal validation.
|
||||
func isURITemplate(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
u, err := urlParse(s)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
for _, item := range strings.Split(u.RawPath, "/") {
|
||||
depth := 0
|
||||
for _, ch := range item {
|
||||
switch ch {
|
||||
case '{':
|
||||
depth++
|
||||
if depth != 1 {
|
||||
return false
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
if depth != 0 {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
if depth != 0 {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// isRegex tells whether given string is a valid regular expression,
// according to the ECMA 262 regular expression dialect.
//
// The implementation uses go-lang regexp package.
func isRegex(v interface{}) bool {
	str, isStr := v.(string)
	if !isStr {
		return true // format applies to strings only
	}
	if _, err := regexp.Compile(str); err != nil {
		return false
	}
	return true
}
|
||||
|
||||
// isJSONPointer tells whether given string is a valid JSON Pointer.
//
// Note: It returns false for JSON Pointer URI fragments.
func isJSONPointer(v interface{}) bool {
	str, isStr := v.(string)
	if !isStr {
		return true // format applies to strings only
	}
	// the empty pointer references the whole document
	if str == "" {
		return true
	}
	// a non-empty pointer must begin with '/'
	if str[0] != '/' {
		return false
	}
	for _, token := range strings.Split(str, "/") {
		// '~' is an escape character and must be followed by '0' or '1'
		for i := 0; i < len(token); i++ {
			if token[i] != '~' {
				continue
			}
			if i+1 >= len(token) {
				return false // dangling '~' at end of token
			}
			if token[i+1] != '0' && token[i+1] != '1' {
				return false // invalid escape
			}
		}
	}
	return true
}
|
||||
|
||||
// isRelativeJSONPointer tells whether given string is a valid Relative JSON Pointer.
|
||||
//
|
||||
// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
|
||||
func isRelativeJSONPointer(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
if s == "" {
|
||||
return false
|
||||
}
|
||||
if s[0] == '0' {
|
||||
s = s[1:]
|
||||
} else if s[0] >= '0' && s[0] <= '9' {
|
||||
for s != "" && s[0] >= '0' && s[0] <= '9' {
|
||||
s = s[1:]
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
return s == "#" || isJSONPointer(s)
|
||||
}
|
||||
|
||||
// isUUID tells whether given string is a valid uuid format
// as specified in RFC4122.
//
// see https://datatracker.ietf.org/doc/html/rfc4122#page-4, for details
func isUUID(v interface{}) bool {
	str, isStr := v.(string)
	if !isStr {
		return true // format applies to strings only
	}
	isHex := func(c byte) bool {
		return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')
	}
	// canonical form: five '-'-separated groups of 8-4-4-4-12 hex digits
	groupLens := [5]int{8, 4, 4, 4, 12}
	pos := 0
	for gi, glen := range groupLens {
		if gi > 0 {
			if pos >= len(str) || str[pos] != '-' {
				return false
			}
			pos++
		}
		for k := 0; k < glen; k++ {
			if pos >= len(str) || !isHex(str[pos]) {
				return false
			}
			pos++
		}
	}
	// nothing may follow the last group
	return pos == len(str)
}
|
||||
38
vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go
generated
vendored
38
vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go
generated
vendored
|
|
@ -1,38 +0,0 @@
|
|||
// Package httploader implements loader.Loader for http/https url.
|
||||
//
|
||||
// The package is typically only imported for the side effect of
|
||||
// registering its Loaders.
|
||||
//
|
||||
// To use httploader, link this package into your program:
|
||||
//
|
||||
// import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
package httploader
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v5"
|
||||
)
|
||||
|
||||
// Client is the default HTTP Client used to Get the resource.
var Client = http.DefaultClient

// Load loads resource from given http(s) url.
//
// The caller owns the returned ReadCloser and must close it.
// Any response other than 200 OK is treated as an error.
func Load(url string) (io.ReadCloser, error) {
	resp, err := Client.Get(url)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		// close the body so the underlying connection can be reused
		_ = resp.Body.Close()
		return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode)
	}
	return resp.Body, nil
}

// init registers Load for both the http and https url schemes.
func init() {
	jsonschema.Loaders["http"] = Load
	jsonschema.Loaders["https"] = Load
}
|
||||
60
vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
generated
vendored
60
vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
generated
vendored
|
|
@ -1,60 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// loadFileURL opens the local file referenced by the given file:// url.
func loadFileURL(s string) (io.ReadCloser, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, err
	}
	f := u.Path
	if runtime.GOOS == "windows" {
		// strip the leading slash of "/C:/..." and convert to backslashes
		f = strings.TrimPrefix(f, "/")
		f = filepath.FromSlash(f)
	}
	return os.Open(f)
}

// Loaders is a registry of functions, which know how to load
// absolute url of specific schema.
//
// New loaders can be registered by adding to this map. Key is schema,
// value is function that knows how to load url of that schema
var Loaders = map[string]func(url string) (io.ReadCloser, error){
	"file": loadFileURL,
}

// LoaderNotFoundError is the error type returned by Load function.
// It tells that no Loader is registered for that URL Scheme.
type LoaderNotFoundError string

// Error implements the error interface.
func (e LoaderNotFoundError) Error() string {
	return fmt.Sprintf("jsonschema: no Loader found for %s", string(e))
}

// LoadURL loads document at given absolute URL. The default implementation
// uses Loaders registry to lookup by schema and uses that loader.
//
// Users can change this variable, if they would like to take complete
// responsibility of loading given URL. Used by Compiler if its LoadURL
// field is nil.
var LoadURL = func(s string) (io.ReadCloser, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, err
	}
	loader, ok := Loaders[u.Scheme]
	if !ok {
		return nil, LoaderNotFoundError(s)
	}
	return loader(s)
}
|
||||
77
vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
generated
vendored
77
vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
generated
vendored
|
|
@ -1,77 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
// Flag is output format with simple boolean property valid.
type Flag struct {
	Valid bool `json:"valid"`
}

// FlagOutput returns output in flag format
//
// Note: a ValidationError always represents a failure, so the
// returned Flag has Valid set to false (the zero value).
func (ve *ValidationError) FlagOutput() Flag {
	return Flag{}
}

// Basic ---

// Basic is output format with flat list of output units.
type Basic struct {
	Valid  bool         `json:"valid"`
	Errors []BasicError `json:"errors"`
}

// BasicError is output unit in basic format.
type BasicError struct {
	KeywordLocation         string `json:"keywordLocation"`
	AbsoluteKeywordLocation string `json:"absoluteKeywordLocation"`
	InstanceLocation        string `json:"instanceLocation"`
	Error                   string `json:"error"`
}

// BasicOutput returns output in basic format
//
// The error tree is flattened depth-first into Basic.Errors.
func (ve *ValidationError) BasicOutput() Basic {
	var errors []BasicError
	var flatten func(*ValidationError)
	flatten = func(ve *ValidationError) {
		errors = append(errors, BasicError{
			KeywordLocation:         ve.KeywordLocation,
			AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
			InstanceLocation:        ve.InstanceLocation,
			Error:                   ve.Message,
		})
		for _, cause := range ve.Causes {
			flatten(cause)
		}
	}
	flatten(ve)
	return Basic{Errors: errors}
}

// Detailed ---

// Detailed is output format based on structure of schema.
type Detailed struct {
	Valid                   bool       `json:"valid"`
	KeywordLocation         string     `json:"keywordLocation"`
	AbsoluteKeywordLocation string     `json:"absoluteKeywordLocation"`
	InstanceLocation        string     `json:"instanceLocation"`
	Error                   string     `json:"error,omitempty"`
	Errors                  []Detailed `json:"errors,omitempty"`
}

// DetailedOutput returns output in detailed format
//
// The result mirrors the nesting of the error causes; a node that
// has causes reports only its children, not its own message.
func (ve *ValidationError) DetailedOutput() Detailed {
	var errors []Detailed
	for _, cause := range ve.Causes {
		errors = append(errors, cause.DetailedOutput())
	}
	var message = ve.Message
	if len(ve.Causes) > 0 {
		message = ""
	}
	return Detailed{
		KeywordLocation:         ve.KeywordLocation,
		AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
		InstanceLocation:        ve.InstanceLocation,
		Error:                   message,
		Errors:                  errors,
	}
}
|
||||
280
vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
generated
vendored
280
vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
generated
vendored
|
|
@ -1,280 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// resource represents a json document (or a sub-document of it) that is
// addressable by url and/or json-pointer fragment during compilation.
type resource struct {
	url          string // base url of resource. can be empty
	floc         string // fragment with json-pointer from root resource
	doc          interface{}
	draft        *Draft
	subresources map[string]*resource // key is floc. only applicable for root resource
	schema       *Schema
}

// String returns the absolute location of the resource: url + fragment.
func (r *resource) String() string {
	return r.url + r.floc
}

// newResource reads and unmarshals a root resource from r.
// The url must not contain a fragment; it is normalized to absolute form.
func newResource(url string, r io.Reader) (*resource, error) {
	if strings.IndexByte(url, '#') != -1 {
		panic(fmt.Sprintf("BUG: newResource(%q)", url))
	}
	doc, err := unmarshal(r)
	if err != nil {
		return nil, fmt.Errorf("jsonschema: invalid json %s: %v", url, err)
	}
	url, err = toAbs(url)
	if err != nil {
		return nil, err
	}
	return &resource{
		url:  url,
		floc: "#",
		doc:  doc,
	}, nil
}

// fillSubschemas fills subschemas in res into r.subresources
func (r *resource) fillSubschemas(c *Compiler, res *resource) error {
	// validate the document against the draft meta-schema first
	if err := c.validateSchema(r, res.doc, res.floc[1:]); err != nil {
		return err
	}

	if r.subresources == nil {
		r.subresources = make(map[string]*resource)
	}
	if err := r.draft.listSubschemas(res, r.baseURL(res.floc), r.subresources); err != nil {
		return err
	}

	// ensure subresource.url uniqueness
	url2floc := make(map[string]string)
	for _, sr := range r.subresources {
		if sr.url != "" {
			if floc, ok := url2floc[sr.url]; ok {
				return fmt.Errorf("jsonschema: %q and %q in %s have same canonical-uri", floc[1:], sr.floc[1:], r.url)
			}
			url2floc[sr.url] = sr.floc
		}
	}

	return nil
}

// listResources lists all subresources in res
func (r *resource) listResources(res *resource) []*resource {
	var result []*resource
	prefix := res.floc + "/"
	for _, sr := range r.subresources {
		if strings.HasPrefix(sr.floc, prefix) {
			result = append(result, sr)
		}
	}
	return result
}

// findResource returns the resource (root or sub) whose canonical url
// matches the given url, or nil if none does.
func (r *resource) findResource(url string) *resource {
	if r.url == url {
		return r
	}
	for _, res := range r.subresources {
		if res.url == url {
			return res
		}
	}
	return nil
}
|
||||
|
||||
// resolveFragment resolves fragment f with sr as base: first by anchor,
// then by json-pointer, creating (and registering) a new subresource
// for non-standard pointer locations. Returns (nil, nil) if the
// fragment cannot be resolved.
func (r *resource) resolveFragment(c *Compiler, sr *resource, f string) (*resource, error) {
	if f == "#" || f == "#/" {
		return sr, nil
	}

	// resolve by anchor
	if !strings.HasPrefix(f, "#/") {
		// check in given resource
		for _, anchor := range r.draft.anchors(sr.doc) {
			if anchor == f[1:] {
				return sr, nil
			}
		}

		// check in subresources that has same base url
		prefix := sr.floc + "/"
		for _, res := range r.subresources {
			if strings.HasPrefix(res.floc, prefix) && r.baseURL(res.floc) == sr.url {
				for _, anchor := range r.draft.anchors(res.doc) {
					if anchor == f[1:] {
						return res, nil
					}
				}
			}
		}
		return nil, nil
	}

	// resolve by ptr
	floc := sr.floc + f[1:]
	if res, ok := r.subresources[floc]; ok {
		return res, nil
	}

	// non-standrad location: walk the raw document by json-pointer tokens
	doc := r.doc
	for _, item := range strings.Split(floc[2:], "/") {
		// undo json-pointer escaping (~1 -> '/', ~0 -> '~') and url escaping
		item = strings.Replace(item, "~1", "/", -1)
		item = strings.Replace(item, "~0", "~", -1)
		item, err := url.PathUnescape(item)
		if err != nil {
			return nil, err
		}
		switch d := doc.(type) {
		case map[string]interface{}:
			if _, ok := d[item]; !ok {
				return nil, nil
			}
			doc = d[item]
		case []interface{}:
			index, err := strconv.Atoi(item)
			if err != nil {
				return nil, err
			}
			if index < 0 || index >= len(d) {
				return nil, nil
			}
			doc = d[index]
		default:
			// cannot descend into scalars
			return nil, nil
		}
	}

	// register the located document as a new subresource
	id, err := r.draft.resolveID(r.baseURL(floc), doc)
	if err != nil {
		return nil, err
	}
	res := &resource{url: id, floc: floc, doc: doc}
	r.subresources[floc] = res
	if err := r.fillSubschemas(c, res); err != nil {
		return nil, err
	}
	return res, nil
}

// baseURL returns the base url in effect at the given fragment location,
// walking up the pointer path until a subresource with its own url (an
// embedded $id) is found; falls back to the root resource url.
func (r *resource) baseURL(floc string) string {
	for {
		if sr, ok := r.subresources[floc]; ok {
			if sr.url != "" {
				return sr.url
			}
		}
		slash := strings.LastIndexByte(floc, '/')
		if slash == -1 {
			break
		}
		floc = floc[:slash]
	}
	return r.url
}
|
||||
|
||||
// url helpers ---
|
||||
|
||||
// url helpers ---

// toAbs converts s (an url or a filesystem path) into an absolute url.
func toAbs(s string) (string, error) {
	// if windows absolute file path, convert to file url
	// because: net/url parses driver name as scheme
	if runtime.GOOS == "windows" && len(s) >= 3 && s[1:3] == `:\` {
		s = "file:///" + filepath.ToSlash(s)
	}

	u, err := url.Parse(s)
	if err != nil {
		return "", err
	}
	if u.IsAbs() {
		return s, nil
	}

	// s is filepath
	if s, err = filepath.Abs(s); err != nil {
		return "", err
	}
	if runtime.GOOS == "windows" {
		s = "file:///" + filepath.ToSlash(s)
	} else {
		s = "file://" + s
	}
	u, err = url.Parse(s) // to fix spaces in filepath
	return u.String(), err
}

// resolveURL resolves ref against base and returns the resulting
// absolute url. urn: uris get special handling since net/url cannot
// resolve references against them.
func resolveURL(base, ref string) (string, error) {
	if ref == "" {
		return base, nil
	}
	if strings.HasPrefix(ref, "urn:") {
		return ref, nil
	}

	refURL, err := url.Parse(ref)
	if err != nil {
		return "", err
	}
	if refURL.IsAbs() {
		return ref, nil
	}

	if strings.HasPrefix(base, "urn:") {
		// only a fragment can be resolved against a urn
		base, _ = split(base)
		return base + ref, nil
	}

	baseURL, err := url.Parse(base)
	if err != nil {
		return "", err
	}
	return baseURL.ResolveReference(refURL).String(), nil
}
|
||||
|
||||
// split separates uri into its base and fragment parts. The fragment
// always carries its leading '#'; a missing or "/"-only fragment is
// normalized to "#".
func split(uri string) (string, string) {
	parts := strings.SplitN(uri, "#", 2)
	if len(parts) == 1 {
		return uri, "#"
	}
	frag := "#" + parts[1]
	if frag == "#/" {
		frag = "#"
	}
	return parts[0], frag
}
|
||||
|
||||
// url returns the base-url part of the schema's absolute location.
func (s *Schema) url() string {
	u, _ := split(s.Location)
	return u
}

// loc returns the json-pointer part of the schema's absolute location,
// without the leading '#'.
func (s *Schema) loc() string {
	_, f := split(s.Location)
	return f[1:]
}

// unmarshal decodes a single json value from r, using json.Number for
// numbers to preserve precision, and rejects any trailing content after
// the top-level value.
func unmarshal(r io.Reader) (interface{}, error) {
	decoder := json.NewDecoder(r)
	decoder.UseNumber()
	var doc interface{}
	if err := decoder.Decode(&doc); err != nil {
		return nil, err
	}
	// a second token means there is garbage after the document
	if t, _ := decoder.Token(); t != nil {
		return nil, fmt.Errorf("invalid character %v after top-level value", t)
	}
	return doc, nil
}
|
||||
900
vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
generated
vendored
900
vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
generated
vendored
|
|
@ -1,900 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"math/big"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// A Schema represents compiled version of json-schema.
type Schema struct {
	Location string // absolute location

	Draft          *Draft // draft used by schema.
	meta           *Schema
	vocab          []string
	dynamicAnchors []*Schema

	// type agnostic validations
	Format           string
	format           func(interface{}) bool
	Always           *bool // always pass/fail. used when booleans are used as schemas in draft-07.
	Ref              *Schema
	RecursiveAnchor  bool
	RecursiveRef     *Schema
	DynamicAnchor    string
	DynamicRef       *Schema
	dynamicRefAnchor string
	Types            []string      // allowed types.
	Constant         []interface{} // first element in slice is constant value. note: slice is used to capture nil constant.
	Enum             []interface{} // allowed values.
	enumError        string        // error message for enum fail. captured here to avoid constructing error message every time.
	Not              *Schema
	AllOf            []*Schema
	AnyOf            []*Schema
	OneOf            []*Schema
	If               *Schema
	Then             *Schema // nil, when If is nil.
	Else             *Schema // nil, when If is nil.

	// object validations
	MinProperties         int      // -1 if not specified.
	MaxProperties         int      // -1 if not specified.
	Required              []string // list of required properties.
	Properties            map[string]*Schema
	PropertyNames         *Schema
	RegexProperties       bool // property names must be valid regex. used only in draft4 as workaround in metaschema.
	PatternProperties     map[*regexp.Regexp]*Schema
	AdditionalProperties  interface{}            // nil or bool or *Schema.
	Dependencies          map[string]interface{} // map value is *Schema or []string.
	DependentRequired     map[string][]string
	DependentSchemas      map[string]*Schema
	UnevaluatedProperties *Schema

	// array validations
	MinItems         int // -1 if not specified.
	MaxItems         int // -1 if not specified.
	UniqueItems      bool
	Items            interface{} // nil or *Schema or []*Schema
	AdditionalItems  interface{} // nil or bool or *Schema.
	PrefixItems      []*Schema
	Items2020        *Schema // items keyword reintroduced in draft 2020-12
	Contains         *Schema
	ContainsEval     bool // whether any item in an array that passes validation of the contains schema is considered "evaluated"
	MinContains      int // 1 if not specified
	MaxContains      int // -1 if not specified
	UnevaluatedItems *Schema

	// string validations
	MinLength        int // -1 if not specified.
	MaxLength        int // -1 if not specified.
	Pattern          *regexp.Regexp
	ContentEncoding  string
	decoder          func(string) ([]byte, error)
	ContentMediaType string
	mediaType        func([]byte) error
	ContentSchema    *Schema

	// number validators
	Minimum          *big.Rat
	ExclusiveMinimum *big.Rat
	Maximum          *big.Rat
	ExclusiveMaximum *big.Rat
	MultipleOf       *big.Rat

	// annotations. captured only when Compiler.ExtractAnnotations is true.
	Title       string
	Description string
	Default     interface{}
	Comment     string
	ReadOnly    bool
	WriteOnly   bool
	Examples    []interface{}
	Deprecated  bool

	// user defined extensions
	Extensions map[string]ExtSchema
}

// String returns the absolute location of the schema.
func (s *Schema) String() string {
	return s.Location
}

// newSchema creates a Schema for the document at url+floc, initialising
// unspecified numeric limits to their sentinel values and capturing
// $recursiveAnchor/$dynamicAnchor early (they are needed before full
// compilation).
func newSchema(url, floc string, draft *Draft, doc interface{}) *Schema {
	// fill with default values
	s := &Schema{
		Location:      url + floc,
		Draft:         draft,
		MinProperties: -1,
		MaxProperties: -1,
		MinItems:      -1,
		MaxItems:      -1,
		MinContains:   1,
		MaxContains:   -1,
		MinLength:     -1,
		MaxLength:     -1,
	}

	if doc, ok := doc.(map[string]interface{}); ok {
		if ra, ok := doc["$recursiveAnchor"]; ok {
			if ra, ok := ra.(bool); ok {
				s.RecursiveAnchor = ra
			}
		}
		if da, ok := doc["$dynamicAnchor"]; ok {
			if da, ok := da.(string); ok {
				s.DynamicAnchor = da
			}
		}
	}
	return s
}

// hasVocab reports whether the schema's meta-schema declares the named
// vocabulary (2019-09 or 2020-12 form). The "core" vocabulary is always
// considered present.
func (s *Schema) hasVocab(name string) bool {
	if s == nil { // during bootstrap
		return true
	}
	if name == "core" {
		return true
	}
	for _, url := range s.vocab {
		if url == "https://json-schema.org/draft/2019-09/vocab/"+name {
			return true
		}
		if url == "https://json-schema.org/draft/2020-12/vocab/"+name {
			return true
		}
	}
	return false
}

// Validate validates given doc, against the json-schema s.
//
// the v must be the raw json value. for number precision
// unmarshal with json.UseNumber().
//
// returns *ValidationError if v does not confirm with schema s.
// returns InfiniteLoopError if it detects loop during validation.
// returns InvalidJSONTypeError if it detects any non json value in v.
func (s *Schema) Validate(v interface{}) (err error) {
	return s.validateValue(v, "")
}

// validateValue validates v (located at vloc within the instance),
// converting the panics used internally for loop/type errors back into
// returned errors, and wrapping any validation failure in a root
// ValidationError.
func (s *Schema) validateValue(v interface{}, vloc string) (err error) {
	defer func() {
		if r := recover(); r != nil {
			switch r := r.(type) {
			case InfiniteLoopError, InvalidJSONTypeError:
				// these are raised via panic deep inside validate;
				// surface them as ordinary errors
				err = r.(error)
			default:
				panic(r)
			}
		}
	}()
	if _, err := s.validate(nil, 0, "", v, vloc); err != nil {
		ve := ValidationError{
			KeywordLocation:         "",
			AbsoluteKeywordLocation: s.Location,
			InstanceLocation:        vloc,
			Message:                 fmt.Sprintf("doesn't validate with %s", s.Location),
		}
		return ve.causes(err)
	}
	return nil
}
|
||||
|
||||
// validate validates given value v with this schema.
|
||||
func (s *Schema) validate(scope []schemaRef, vscope int, spath string, v interface{}, vloc string) (result validationResult, err error) {
|
||||
validationError := func(keywordPath string, format string, a ...interface{}) *ValidationError {
|
||||
return &ValidationError{
|
||||
KeywordLocation: keywordLocation(scope, keywordPath),
|
||||
AbsoluteKeywordLocation: joinPtr(s.Location, keywordPath),
|
||||
InstanceLocation: vloc,
|
||||
Message: fmt.Sprintf(format, a...),
|
||||
}
|
||||
}
|
||||
|
||||
sref := schemaRef{spath, s, false}
|
||||
if err := checkLoop(scope[len(scope)-vscope:], sref); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
scope = append(scope, sref)
|
||||
vscope++
|
||||
|
||||
// populate result
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
result.unevalProps = make(map[string]struct{})
|
||||
for pname := range v {
|
||||
result.unevalProps[pname] = struct{}{}
|
||||
}
|
||||
case []interface{}:
|
||||
result.unevalItems = make(map[int]struct{})
|
||||
for i := range v {
|
||||
result.unevalItems[i] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
validate := func(sch *Schema, schPath string, v interface{}, vpath string) error {
|
||||
vloc := vloc
|
||||
if vpath != "" {
|
||||
vloc += "/" + vpath
|
||||
}
|
||||
_, err := sch.validate(scope, 0, schPath, v, vloc)
|
||||
return err
|
||||
}
|
||||
|
||||
validateInplace := func(sch *Schema, schPath string) error {
|
||||
vr, err := sch.validate(scope, vscope, schPath, v, vloc)
|
||||
if err == nil {
|
||||
// update result
|
||||
for pname := range result.unevalProps {
|
||||
if _, ok := vr.unevalProps[pname]; !ok {
|
||||
delete(result.unevalProps, pname)
|
||||
}
|
||||
}
|
||||
for i := range result.unevalItems {
|
||||
if _, ok := vr.unevalItems[i]; !ok {
|
||||
delete(result.unevalItems, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
if s.Always != nil {
|
||||
if !*s.Always {
|
||||
return result, validationError("", "not allowed")
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
if len(s.Types) > 0 {
|
||||
vType := jsonType(v)
|
||||
matched := false
|
||||
for _, t := range s.Types {
|
||||
if vType == t {
|
||||
matched = true
|
||||
break
|
||||
} else if t == "integer" && vType == "number" {
|
||||
num, _ := new(big.Rat).SetString(fmt.Sprint(v))
|
||||
if num.IsInt() {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return result, validationError("type", "expected %s, but got %s", strings.Join(s.Types, " or "), vType)
|
||||
}
|
||||
}
|
||||
|
||||
var errors []error
|
||||
|
||||
if len(s.Constant) > 0 {
|
||||
if !equals(v, s.Constant[0]) {
|
||||
switch jsonType(s.Constant[0]) {
|
||||
case "object", "array":
|
||||
errors = append(errors, validationError("const", "const failed"))
|
||||
default:
|
||||
errors = append(errors, validationError("const", "value must be %#v", s.Constant[0]))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.Enum) > 0 {
|
||||
matched := false
|
||||
for _, item := range s.Enum {
|
||||
if equals(v, item) {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
errors = append(errors, validationError("enum", s.enumError))
|
||||
}
|
||||
}
|
||||
|
||||
if s.format != nil && !s.format(v) {
|
||||
var val = v
|
||||
if v, ok := v.(string); ok {
|
||||
val = quote(v)
|
||||
}
|
||||
errors = append(errors, validationError("format", "%v is not valid %s", val, quote(s.Format)))
|
||||
}
|
||||
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
if s.MinProperties != -1 && len(v) < s.MinProperties {
|
||||
errors = append(errors, validationError("minProperties", "minimum %d properties allowed, but found %d properties", s.MinProperties, len(v)))
|
||||
}
|
||||
if s.MaxProperties != -1 && len(v) > s.MaxProperties {
|
||||
errors = append(errors, validationError("maxProperties", "maximum %d properties allowed, but found %d properties", s.MaxProperties, len(v)))
|
||||
}
|
||||
if len(s.Required) > 0 {
|
||||
var missing []string
|
||||
for _, pname := range s.Required {
|
||||
if _, ok := v[pname]; !ok {
|
||||
missing = append(missing, quote(pname))
|
||||
}
|
||||
}
|
||||
if len(missing) > 0 {
|
||||
errors = append(errors, validationError("required", "missing properties: %s", strings.Join(missing, ", ")))
|
||||
}
|
||||
}
|
||||
|
||||
for pname, sch := range s.Properties {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
delete(result.unevalProps, pname)
|
||||
if err := validate(sch, "properties/"+escape(pname), pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.PropertyNames != nil {
|
||||
for pname := range v {
|
||||
if err := validate(s.PropertyNames, "propertyNames", pname, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.RegexProperties {
|
||||
for pname := range v {
|
||||
if !isRegex(pname) {
|
||||
errors = append(errors, validationError("", "patternProperty %s is not valid regex", quote(pname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
for pattern, sch := range s.PatternProperties {
|
||||
for pname, pvalue := range v {
|
||||
if pattern.MatchString(pname) {
|
||||
delete(result.unevalProps, pname)
|
||||
if err := validate(sch, "patternProperties/"+escape(pattern.String()), pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if s.AdditionalProperties != nil {
|
||||
if allowed, ok := s.AdditionalProperties.(bool); ok {
|
||||
if !allowed && len(result.unevalProps) > 0 {
|
||||
errors = append(errors, validationError("additionalProperties", "additionalProperties %s not allowed", result.unevalPnames()))
|
||||
}
|
||||
} else {
|
||||
schema := s.AdditionalProperties.(*Schema)
|
||||
for pname := range result.unevalProps {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
if err := validate(schema, "additionalProperties", pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
result.unevalProps = nil
|
||||
}
|
||||
for dname, dvalue := range s.Dependencies {
|
||||
if _, ok := v[dname]; ok {
|
||||
switch dvalue := dvalue.(type) {
|
||||
case *Schema:
|
||||
if err := validateInplace(dvalue, "dependencies/"+escape(dname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
case []string:
|
||||
for i, pname := range dvalue {
|
||||
if _, ok := v[pname]; !ok {
|
||||
errors = append(errors, validationError("dependencies/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for dname, dvalue := range s.DependentRequired {
|
||||
if _, ok := v[dname]; ok {
|
||||
for i, pname := range dvalue {
|
||||
if _, ok := v[pname]; !ok {
|
||||
errors = append(errors, validationError("dependentRequired/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for dname, sch := range s.DependentSchemas {
|
||||
if _, ok := v[dname]; ok {
|
||||
if err := validateInplace(sch, "dependentSchemas/"+escape(dname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case []interface{}:
|
||||
if s.MinItems != -1 && len(v) < s.MinItems {
|
||||
errors = append(errors, validationError("minItems", "minimum %d items required, but found %d items", s.MinItems, len(v)))
|
||||
}
|
||||
if s.MaxItems != -1 && len(v) > s.MaxItems {
|
||||
errors = append(errors, validationError("maxItems", "maximum %d items required, but found %d items", s.MaxItems, len(v)))
|
||||
}
|
||||
if s.UniqueItems {
|
||||
if len(v) <= 20 {
|
||||
outer1:
|
||||
for i := 1; i < len(v); i++ {
|
||||
for j := 0; j < i; j++ {
|
||||
if equals(v[i], v[j]) {
|
||||
errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
|
||||
break outer1
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
m := make(map[uint64][]int)
|
||||
var h maphash.Hash
|
||||
outer2:
|
||||
for i, item := range v {
|
||||
h.Reset()
|
||||
hash(item, &h)
|
||||
k := h.Sum64()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
arr, ok := m[k]
|
||||
if ok {
|
||||
for _, j := range arr {
|
||||
if equals(v[j], item) {
|
||||
errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
|
||||
break outer2
|
||||
}
|
||||
}
|
||||
}
|
||||
arr = append(arr, i)
|
||||
m[k] = arr
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// items + additionalItems
|
||||
switch items := s.Items.(type) {
|
||||
case *Schema:
|
||||
for i, item := range v {
|
||||
if err := validate(items, "items", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
result.unevalItems = nil
|
||||
case []*Schema:
|
||||
for i, item := range v {
|
||||
if i < len(items) {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(items[i], "items/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else if sch, ok := s.AdditionalItems.(*Schema); ok {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(sch, "additionalItems", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if additionalItems, ok := s.AdditionalItems.(bool); ok {
|
||||
if additionalItems {
|
||||
result.unevalItems = nil
|
||||
} else if len(v) > len(items) {
|
||||
errors = append(errors, validationError("additionalItems", "only %d items are allowed, but found %d items", len(items), len(v)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// prefixItems + items
|
||||
for i, item := range v {
|
||||
if i < len(s.PrefixItems) {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(s.PrefixItems[i], "prefixItems/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else if s.Items2020 != nil {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(s.Items2020, "items", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// contains + minContains + maxContains
|
||||
if s.Contains != nil && (s.MinContains != -1 || s.MaxContains != -1) {
|
||||
matched := 0
|
||||
var causes []error
|
||||
for i, item := range v {
|
||||
if err := validate(s.Contains, "contains", item, strconv.Itoa(i)); err != nil {
|
||||
causes = append(causes, err)
|
||||
} else {
|
||||
matched++
|
||||
if s.ContainsEval {
|
||||
delete(result.unevalItems, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
if s.MinContains != -1 && matched < s.MinContains {
|
||||
errors = append(errors, validationError("minContains", "valid must be >= %d, but got %d", s.MinContains, matched).add(causes...))
|
||||
}
|
||||
if s.MaxContains != -1 && matched > s.MaxContains {
|
||||
errors = append(errors, validationError("maxContains", "valid must be <= %d, but got %d", s.MaxContains, matched))
|
||||
}
|
||||
}
|
||||
|
||||
case string:
|
||||
// minLength + maxLength
|
||||
if s.MinLength != -1 || s.MaxLength != -1 {
|
||||
length := utf8.RuneCount([]byte(v))
|
||||
if s.MinLength != -1 && length < s.MinLength {
|
||||
errors = append(errors, validationError("minLength", "length must be >= %d, but got %d", s.MinLength, length))
|
||||
}
|
||||
if s.MaxLength != -1 && length > s.MaxLength {
|
||||
errors = append(errors, validationError("maxLength", "length must be <= %d, but got %d", s.MaxLength, length))
|
||||
}
|
||||
}
|
||||
|
||||
if s.Pattern != nil && !s.Pattern.MatchString(v) {
|
||||
errors = append(errors, validationError("pattern", "does not match pattern %s", quote(s.Pattern.String())))
|
||||
}
|
||||
|
||||
// contentEncoding + contentMediaType
|
||||
if s.decoder != nil || s.mediaType != nil {
|
||||
decoded := s.ContentEncoding == ""
|
||||
var content []byte
|
||||
if s.decoder != nil {
|
||||
b, err := s.decoder(v)
|
||||
if err != nil {
|
||||
errors = append(errors, validationError("contentEncoding", "value is not %s encoded", s.ContentEncoding))
|
||||
} else {
|
||||
content, decoded = b, true
|
||||
}
|
||||
}
|
||||
if decoded && s.mediaType != nil {
|
||||
if s.decoder == nil {
|
||||
content = []byte(v)
|
||||
}
|
||||
if err := s.mediaType(content); err != nil {
|
||||
errors = append(errors, validationError("contentMediaType", "value is not of mediatype %s", quote(s.ContentMediaType)))
|
||||
}
|
||||
}
|
||||
if decoded && s.ContentSchema != nil {
|
||||
contentJSON, err := unmarshal(bytes.NewReader(content))
|
||||
if err != nil {
|
||||
errors = append(errors, validationError("contentSchema", "value is not valid json"))
|
||||
} else {
|
||||
err := validate(s.ContentSchema, "contentSchema", contentJSON, "")
|
||||
if err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
|
||||
// lazy convert to *big.Rat to avoid allocation
|
||||
var numVal *big.Rat
|
||||
num := func() *big.Rat {
|
||||
if numVal == nil {
|
||||
numVal, _ = new(big.Rat).SetString(fmt.Sprint(v))
|
||||
}
|
||||
return numVal
|
||||
}
|
||||
f64 := func(r *big.Rat) float64 {
|
||||
f, _ := r.Float64()
|
||||
return f
|
||||
}
|
||||
if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
|
||||
errors = append(errors, validationError("minimum", "must be >= %v but found %v", f64(s.Minimum), v))
|
||||
}
|
||||
if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
|
||||
errors = append(errors, validationError("exclusiveMinimum", "must be > %v but found %v", f64(s.ExclusiveMinimum), v))
|
||||
}
|
||||
if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
|
||||
errors = append(errors, validationError("maximum", "must be <= %v but found %v", f64(s.Maximum), v))
|
||||
}
|
||||
if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
|
||||
errors = append(errors, validationError("exclusiveMaximum", "must be < %v but found %v", f64(s.ExclusiveMaximum), v))
|
||||
}
|
||||
if s.MultipleOf != nil {
|
||||
if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
|
||||
errors = append(errors, validationError("multipleOf", "%v not multipleOf %v", v, f64(s.MultipleOf)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// $ref + $recursiveRef + $dynamicRef
|
||||
validateRef := func(sch *Schema, refPath string) error {
|
||||
if sch != nil {
|
||||
if err := validateInplace(sch, refPath); err != nil {
|
||||
var url = sch.Location
|
||||
if s.url() == sch.url() {
|
||||
url = sch.loc()
|
||||
}
|
||||
return validationError(refPath, "doesn't validate with %s", quote(url)).causes(err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := validateRef(s.Ref, "$ref"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
if s.RecursiveRef != nil {
|
||||
sch := s.RecursiveRef
|
||||
if sch.RecursiveAnchor {
|
||||
// recursiveRef based on scope
|
||||
for _, e := range scope {
|
||||
if e.schema.RecursiveAnchor {
|
||||
sch = e.schema
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := validateRef(sch, "$recursiveRef"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
if s.DynamicRef != nil {
|
||||
sch := s.DynamicRef
|
||||
if s.dynamicRefAnchor != "" && sch.DynamicAnchor == s.dynamicRefAnchor {
|
||||
// dynamicRef based on scope
|
||||
for i := len(scope) - 1; i >= 0; i-- {
|
||||
sr := scope[i]
|
||||
if sr.discard {
|
||||
break
|
||||
}
|
||||
for _, da := range sr.schema.dynamicAnchors {
|
||||
if da.DynamicAnchor == s.DynamicRef.DynamicAnchor && da != s.DynamicRef {
|
||||
sch = da
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := validateRef(sch, "$dynamicRef"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
|
||||
if s.Not != nil && validateInplace(s.Not, "not") == nil {
|
||||
errors = append(errors, validationError("not", "not failed"))
|
||||
}
|
||||
|
||||
for i, sch := range s.AllOf {
|
||||
schPath := "allOf/" + strconv.Itoa(i)
|
||||
if err := validateInplace(sch, schPath); err != nil {
|
||||
errors = append(errors, validationError(schPath, "allOf failed").add(err))
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.AnyOf) > 0 {
|
||||
matched := false
|
||||
var causes []error
|
||||
for i, sch := range s.AnyOf {
|
||||
if err := validateInplace(sch, "anyOf/"+strconv.Itoa(i)); err == nil {
|
||||
matched = true
|
||||
} else {
|
||||
causes = append(causes, err)
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
errors = append(errors, validationError("anyOf", "anyOf failed").add(causes...))
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.OneOf) > 0 {
|
||||
matched := -1
|
||||
var causes []error
|
||||
for i, sch := range s.OneOf {
|
||||
if err := validateInplace(sch, "oneOf/"+strconv.Itoa(i)); err == nil {
|
||||
if matched == -1 {
|
||||
matched = i
|
||||
} else {
|
||||
errors = append(errors, validationError("oneOf", "valid against schemas at indexes %d and %d", matched, i))
|
||||
break
|
||||
}
|
||||
} else {
|
||||
causes = append(causes, err)
|
||||
}
|
||||
}
|
||||
if matched == -1 {
|
||||
errors = append(errors, validationError("oneOf", "oneOf failed").add(causes...))
|
||||
}
|
||||
}
|
||||
|
||||
// if + then + else
|
||||
if s.If != nil {
|
||||
err := validateInplace(s.If, "if")
|
||||
// "if" leaves dynamic scope
|
||||
scope[len(scope)-1].discard = true
|
||||
if err == nil {
|
||||
if s.Then != nil {
|
||||
if err := validateInplace(s.Then, "then"); err != nil {
|
||||
errors = append(errors, validationError("then", "if-then failed").add(err))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if s.Else != nil {
|
||||
if err := validateInplace(s.Else, "else"); err != nil {
|
||||
errors = append(errors, validationError("else", "if-else failed").add(err))
|
||||
}
|
||||
}
|
||||
}
|
||||
// restore dynamic scope
|
||||
scope[len(scope)-1].discard = false
|
||||
}
|
||||
|
||||
for _, ext := range s.Extensions {
|
||||
if err := ext.Validate(ValidationContext{result, validate, validateInplace, validationError}, v); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
|
||||
// unevaluatedProperties + unevaluatedItems
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
if s.UnevaluatedProperties != nil {
|
||||
for pname := range result.unevalProps {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
if err := validate(s.UnevaluatedProperties, "unevaluatedProperties", pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
result.unevalProps = nil
|
||||
}
|
||||
case []interface{}:
|
||||
if s.UnevaluatedItems != nil {
|
||||
for i := range result.unevalItems {
|
||||
if err := validate(s.UnevaluatedItems, "unevaluatedItems", v[i], strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
result.unevalItems = nil
|
||||
}
|
||||
}
|
||||
|
||||
switch len(errors) {
|
||||
case 0:
|
||||
return result, nil
|
||||
case 1:
|
||||
return result, errors[0]
|
||||
default:
|
||||
return result, validationError("", "").add(errors...) // empty message, used just for wrapping
|
||||
}
|
||||
}
|
||||
|
||||
// validationResult records which parts of the instance were left
// unevaluated by a schema, feeding the unevaluatedProperties and
// unevaluatedItems keywords.
type validationResult struct {
	unevalProps map[string]struct{} // property names not evaluated by any keyword
	unevalItems map[int]struct{}    // array indexes not evaluated by any keyword
}
|
||||
|
||||
func (vr validationResult) unevalPnames() string {
|
||||
pnames := make([]string, 0, len(vr.unevalProps))
|
||||
for pname := range vr.unevalProps {
|
||||
pnames = append(pnames, quote(pname))
|
||||
}
|
||||
return strings.Join(pnames, ", ")
|
||||
}
|
||||
|
||||
// jsonType returns the json type of given value v.
|
||||
//
|
||||
// It panics if the given value is not valid json value
|
||||
func jsonType(v interface{}) string {
|
||||
switch v.(type) {
|
||||
case nil:
|
||||
return "null"
|
||||
case bool:
|
||||
return "boolean"
|
||||
case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
|
||||
return "number"
|
||||
case string:
|
||||
return "string"
|
||||
case []interface{}:
|
||||
return "array"
|
||||
case map[string]interface{}:
|
||||
return "object"
|
||||
}
|
||||
panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
|
||||
}
|
||||
|
||||
// equals tells if given two json values are equal or not.
|
||||
func equals(v1, v2 interface{}) bool {
|
||||
v1Type := jsonType(v1)
|
||||
if v1Type != jsonType(v2) {
|
||||
return false
|
||||
}
|
||||
switch v1Type {
|
||||
case "array":
|
||||
arr1, arr2 := v1.([]interface{}), v2.([]interface{})
|
||||
if len(arr1) != len(arr2) {
|
||||
return false
|
||||
}
|
||||
for i := range arr1 {
|
||||
if !equals(arr1[i], arr2[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case "object":
|
||||
obj1, obj2 := v1.(map[string]interface{}), v2.(map[string]interface{})
|
||||
if len(obj1) != len(obj2) {
|
||||
return false
|
||||
}
|
||||
for k, v1 := range obj1 {
|
||||
if v2, ok := obj2[k]; ok {
|
||||
if !equals(v1, v2) {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case "number":
|
||||
num1, _ := new(big.Rat).SetString(fmt.Sprint(v1))
|
||||
num2, _ := new(big.Rat).SetString(fmt.Sprint(v2))
|
||||
return num1.Cmp(num2) == 0
|
||||
default:
|
||||
return v1 == v2
|
||||
}
|
||||
}
|
||||
|
||||
func hash(v interface{}, h *maphash.Hash) {
|
||||
switch v := v.(type) {
|
||||
case nil:
|
||||
h.WriteByte(0)
|
||||
case bool:
|
||||
h.WriteByte(1)
|
||||
if v {
|
||||
h.WriteByte(1)
|
||||
} else {
|
||||
h.WriteByte(0)
|
||||
}
|
||||
case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
|
||||
h.WriteByte(2)
|
||||
num, _ := new(big.Rat).SetString(fmt.Sprint(v))
|
||||
h.Write(num.Num().Bytes())
|
||||
h.Write(num.Denom().Bytes())
|
||||
case string:
|
||||
h.WriteByte(3)
|
||||
h.WriteString(v)
|
||||
case []interface{}:
|
||||
h.WriteByte(4)
|
||||
for _, item := range v {
|
||||
hash(item, h)
|
||||
}
|
||||
case map[string]interface{}:
|
||||
h.WriteByte(5)
|
||||
props := make([]string, 0, len(v))
|
||||
for prop := range v {
|
||||
props = append(props, prop)
|
||||
}
|
||||
sort.Slice(props, func(i, j int) bool {
|
||||
return props[i] < props[j]
|
||||
})
|
||||
for _, prop := range props {
|
||||
hash(prop, h)
|
||||
hash(v[prop], h)
|
||||
}
|
||||
default:
|
||||
panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
|
||||
}
|
||||
}
|
||||
|
||||
// escape converts given token to valid json-pointer token:
// "~" becomes "~0" and "/" becomes "~1" (per RFC 6901), then the
// result is percent-encoded for use inside a URL fragment.
func escape(token string) string {
	pointerToken := strings.NewReplacer("~", "~0", "/", "~1").Replace(token)
	return url.PathEscape(pointerToken)
}
|
||||
|
|
@ -1,3 +1,4 @@
|
|||
[submodule "testdata/JSON-Schema-Test-Suite"]
|
||||
path = testdata/JSON-Schema-Test-Suite
|
||||
url = https://github.com/json-schema-org/JSON-Schema-Test-Suite.git
|
||||
branch = main
|
||||
5
vendor/github.com/santhosh-tekuri/jsonschema/v6/.golangci.yml
generated
vendored
Normal file
5
vendor/github.com/santhosh-tekuri/jsonschema/v6/.golangci.yml
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
linters:
|
||||
enable:
|
||||
- nakedret
|
||||
- errname
|
||||
- godot
|
||||
7
vendor/github.com/santhosh-tekuri/jsonschema/v6/.pre-commit-hooks.yaml
generated
vendored
Normal file
7
vendor/github.com/santhosh-tekuri/jsonschema/v6/.pre-commit-hooks.yaml
generated
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
- id: jsonschema-validate
|
||||
name: Validate JSON against JSON Schema
|
||||
description: ensure json files follow specified JSON Schema
|
||||
entry: jv
|
||||
language: golang
|
||||
additional_dependencies:
|
||||
- ./cmd/jv
|
||||
86
vendor/github.com/santhosh-tekuri/jsonschema/v6/README.md
generated
vendored
Normal file
86
vendor/github.com/santhosh-tekuri/jsonschema/v6/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
# jsonschema v6.0.0
|
||||
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v6)
|
||||
[](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v6)
|
||||
[](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
|
||||
[](https://codecov.io/gh/santhosh-tekuri/jsonschema/tree/boon)
|
||||
|
||||
see [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v6) for examples
|
||||
|
||||
## Library Features
|
||||
|
||||
- [x] pass [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite) excluding optional(compare with other impls at [bowtie](https://bowtie-json-schema.github.io/bowtie/#))
|
||||
- [x] [](https://bowtie.report/#/dialects/draft4)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft6)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft7)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft2019-09)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft2020-12)
|
||||
- [x] detect infinite loop traps
|
||||
- [x] `$schema` cycle
|
||||
- [x] validation cycle
|
||||
- [x] custom `$schema` url
|
||||
- [x] vocabulary based validation
|
||||
- [x] custom regex engine
|
||||
- [x] format assertions
|
||||
- [x] flag to enable in draft >= 2019-09
|
||||
- [x] custom format registration
|
||||
- [x] built-in formats
|
||||
- [x] regex, uuid
|
||||
- [x] ipv4, ipv6
|
||||
- [x] hostname, email
|
||||
- [x] date, time, date-time, duration
|
||||
- [x] json-pointer, relative-json-pointer
|
||||
- [x] uri, uri-reference, uri-template
|
||||
- [x] iri, iri-reference
|
||||
- [x] period, semver
|
||||
- [x] content assertions
|
||||
- [x] flag to enable in draft >= 7
|
||||
- [x] contentEncoding
|
||||
- [x] base64
|
||||
- [x] custom
|
||||
- [x] contentMediaType
|
||||
- [x] application/json
|
||||
- [x] custom
|
||||
- [x] contentSchema
|
||||
- [x] errors
|
||||
- [x] introspectable
|
||||
- [x] hierarchy
|
||||
- [x] alternative display with `#`
|
||||
- [x] output
|
||||
- [x] flag
|
||||
- [x] basic
|
||||
- [x] detailed
|
||||
- [x] custom vocabulary
|
||||
- enable via `$vocabulary` for draft >= 2019-09
|
||||
- enable via flag for draft <= 7
|
||||
- [x] mixed dialect support
|
||||
|
||||
## CLI
|
||||
|
||||
to install: `go install github.com/santhosh-tekuri/jsonschema/v6/cmd/jv@latest`
|
||||
|
||||
```
|
||||
Usage: jv [OPTIONS] SCHEMA [INSTANCE...]
|
||||
|
||||
Options:
|
||||
-c, --assert-content Enable content assertions with draft >= 7
|
||||
-f, --assert-format Enable format assertions with draft >= 2019
|
||||
--cacert pem-file Use the specified pem-file to verify the peer. The file may contain multiple CA certificates
|
||||
-d, --draft version Draft version used when '$schema' is missing. Valid values 4, 6, 7, 2019, 2020 (default 2020)
|
||||
-h, --help Print help information
|
||||
-k, --insecure Use insecure TLS connection
|
||||
-o, --output format Output format. Valid values simple, alt, flag, basic, detailed (default "simple")
|
||||
-q, --quiet Do not print errors
|
||||
-v, --version Print build information
|
||||
```
|
||||
|
||||
- [x] exit code `1` for validation errors, `2` for usage errors
|
||||
- [x] validate both schema and multiple instances
|
||||
- [x] support both json and yaml files
|
||||
- [x] support standard input, use `-`
|
||||
- [x] quiet mode with parsable output
|
||||
- [x] http(s) url support
|
||||
- [x] custom certs for validation, use `--cacert`
|
||||
- [x] flag to skip certificate verification, use `--insecure`
|
||||
|
||||
332
vendor/github.com/santhosh-tekuri/jsonschema/v6/compiler.go
generated
vendored
Normal file
332
vendor/github.com/santhosh-tekuri/jsonschema/v6/compiler.go
generated
vendored
Normal file
|
|
@ -0,0 +1,332 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"slices"
|
||||
)
|
||||
|
||||
// Compiler compiles json schema into *Schema.
type Compiler struct {
	schemas       map[urlPtr]*Schema    // already-compiled schemas, keyed by url+pointer
	roots         *roots                // loaded schema resources and compile settings
	formats       map[string]*Format    // formats registered via RegisterFormat
	decoders      map[string]*Decoder   // decoders registered via RegisterContentEncoding
	mediaTypes    map[string]*MediaType // media types registered via RegisterContentMediaType
	assertFormat  bool                  // set by AssertFormat: always assert "format"
	assertContent bool                  // set by AssertContent: assert content keywords
}
|
||||
|
||||
// NewCompiler create Compiler Object.
|
||||
func NewCompiler() *Compiler {
|
||||
return &Compiler{
|
||||
schemas: map[urlPtr]*Schema{},
|
||||
roots: newRoots(),
|
||||
formats: map[string]*Format{},
|
||||
decoders: map[string]*Decoder{},
|
||||
mediaTypes: map[string]*MediaType{},
|
||||
assertFormat: false,
|
||||
assertContent: false,
|
||||
}
|
||||
}
|
||||
|
||||
// DefaultDraft overrides the draft used to
// compile schemas without `$schema` field.
//
// By default, this library uses the latest
// draft supported.
//
// The use of this option is HIGHLY encouraged
// to ensure continued correct operation of your
// schema. The current default value will not stay
// the same over time.
func (c *Compiler) DefaultDraft(d *Draft) {
	c.roots.defaultDraft = d
}
|
||||
|
||||
// AssertFormat always enables format assertions.
// Once called, this cannot be undone for this Compiler.
//
// Default Behavior:
// for draft-07: enabled.
// for draft/2019-09: disabled unless metaschema says `format` vocabulary is required.
// for draft/2020-12: disabled unless metaschema says `format-assertion` vocabulary is required.
func (c *Compiler) AssertFormat() {
	c.assertFormat = true
}
|
||||
|
||||
// AssertContent enables content assertions.
// Once called, this cannot be undone for this Compiler.
//
// Content assertions include keywords:
//   - contentEncoding
//   - contentMediaType
//   - contentSchema
//
// Default behavior is always disabled.
func (c *Compiler) AssertContent() {
	c.assertContent = true
}
|
||||
|
||||
// RegisterFormat registers custom format.
|
||||
//
|
||||
// NOTE:
|
||||
// - "regex" format can not be overridden
|
||||
// - format assertions are disabled for draft >= 2019-09
|
||||
// see [Compiler.AssertFormat]
|
||||
func (c *Compiler) RegisterFormat(f *Format) {
|
||||
if f.Name != "regex" {
|
||||
c.formats[f.Name] = f
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterContentEncoding registers custom contentEncoding.
// Registering a decoder with an existing name replaces the previous one.
//
// NOTE: content assertions are disabled by default.
// see [Compiler.AssertContent].
func (c *Compiler) RegisterContentEncoding(d *Decoder) {
	c.decoders[d.Name] = d
}
|
||||
|
||||
// RegisterContentMediaType registers custom contentMediaType.
// Registering a media type with an existing name replaces the previous one.
//
// NOTE: content assertions are disabled by default.
// see [Compiler.AssertContent].
func (c *Compiler) RegisterContentMediaType(mt *MediaType) {
	c.mediaTypes[mt.Name] = mt
}
|
||||
|
||||
// RegisterVocabulary registers custom vocabulary, keyed by its URL.
// Registering a vocabulary with an existing URL replaces the previous one.
//
// NOTE:
//   - vocabularies are disabled for draft >= 2019-09
//     see [Compiler.AssertVocabs]
func (c *Compiler) RegisterVocabulary(vocab *Vocabulary) {
	c.roots.vocabularies[vocab.URL] = vocab
}
|
||||
|
||||
// AssertVocabs always enables user-defined vocabularies assertions.
|
||||
//
|
||||
// Default Behavior:
|
||||
// for draft-07: enabled.
|
||||
// for draft/2019-09: disabled unless metaschema enables a vocabulary.
|
||||
// for draft/2020-12: disabled unless metaschema enables a vocabulary.
|
||||
func (c *Compiler) AssertVocabs() {
|
||||
c.roots.assertVocabs = true
|
||||
}
|
||||
|
||||
// AddResource adds schema resource which gets used later in reference
// resolution.
//
// The argument url can be file path or url. Any fragment in url is ignored.
// The argument doc must be valid json value.
//
// Returns *ResourceExistsError if a resource already exists at url,
// or if url refers to a built-in meta-schema.
func (c *Compiler) AddResource(url string, doc any) error {
	uf, err := absolute(url)
	if err != nil {
		return err
	}
	// built-in meta-schema locations must not be shadowed by user resources
	if isMeta(string(uf.url)) {
		return &ResourceExistsError{string(uf.url)}
	}
	// loader.add reports false when a resource is already registered at uf.url
	if !c.roots.loader.add(uf.url, doc) {
		return &ResourceExistsError{string(uf.url)}
	}
	return nil
}
|
||||
|
||||
// UseLoader overrides the default [URLLoader] used
// to load schema resources.
func (c *Compiler) UseLoader(loader URLLoader) {
	c.roots.loader.loader = loader
}

// UseRegexpEngine changes the regexp-engine used.
// By default it uses regexp package from go standard
// library.
//
// NOTE: must be called before compiling any schemas.
func (c *Compiler) UseRegexpEngine(engine RegexpEngine) {
	// nil restores the default engine instead of panicking later
	if engine == nil {
		engine = goRegexpCompile
	}
	c.roots.regexpEngine = engine
}
|
||||
|
||||
// enqueue returns the Schema for location up, scheduling it on q for
// compilation unless it has already been compiled or already queued.
func (c *Compiler) enqueue(q *queue, up urlPtr) *Schema {
	if sch, ok := c.schemas[up]; ok {
		// already got compiled
		return sch
	}
	if sch := q.get(up); sch != nil {
		// already queued for compilation
		return sch
	}
	sch := newSchema(up)
	q.append(sch)
	return sch
}
|
||||
|
||||
// MustCompile is like [Compile] but panics if compilation fails.
// It simplifies safe initialization of global variables holding
// compiled schema.
func (c *Compiler) MustCompile(loc string) *Schema {
	sch, err := c.Compile(loc)
	if err != nil {
		panic(fmt.Sprintf("jsonschema: Compile(%q): %v", loc, err))
	}
	return sch
}

// Compile compiles json-schema at given loc.
// loc can be a file path or a url, optionally carrying a
// json-pointer or anchor fragment.
func (c *Compiler) Compile(loc string) (*Schema, error) {
	uf, err := absolute(loc)
	if err != nil {
		return nil, err
	}
	// resolve any fragment (json-pointer or anchor) to a concrete pointer
	up, err := c.roots.resolveFragment(*uf)
	if err != nil {
		return nil, err
	}
	return c.doCompile(up)
}
|
||||
|
||||
// doCompile compiles the schema at up together with every schema it
// references, breadth-first. Compiled schemas are committed to
// c.schemas only after the entire queue succeeds, so a failed
// compilation leaves the compiler's cache unchanged.
func (c *Compiler) doCompile(up urlPtr) (*Schema, error) {
	q := &queue{}
	compiled := 0

	c.enqueue(q, up)
	for q.len() > compiled {
		sch := q.at(compiled)
		if err := c.roots.ensureSubschema(sch.up); err != nil {
			return nil, err
		}
		r := c.roots.roots[sch.up.url]
		v, err := sch.up.lookup(r.doc)
		if err != nil {
			return nil, err
		}
		// compileValue may enqueue more schemas, growing q while we iterate
		if err := c.compileValue(v, sch, r, q); err != nil {
			return nil, err
		}
		compiled++
	}
	// commit the whole batch atomically
	for _, sch := range *q {
		c.schemas[sch.up] = sch
	}
	return c.schemas[up], nil
}
|
||||
|
||||
// compileValue compiles the json value v (a bool or an object) into sch,
// enqueueing any schemas it references for later compilation.
func (c *Compiler) compileValue(v any, sch *Schema, r *root, q *queue) error {
	res := r.resource(sch.up.ptr)
	sch.DraftVersion = res.dialect.draft.version

	// link every schema to its enclosing resource schema
	base := urlPtr{sch.up.url, res.ptr}
	sch.resource = c.enqueue(q, base)

	// if resource, enqueue dynamic anchors for compilation
	if sch.DraftVersion >= 2020 && sch.up == sch.resource.up {
		res := r.resource(sch.up.ptr)
		for anchor, anchorPtr := range res.anchors {
			if slices.Contains(res.dynamicAnchors, anchor) {
				up := urlPtr{sch.up.url, anchorPtr}
				danchorSch := c.enqueue(q, up)
				if sch.dynamicAnchors == nil {
					sch.dynamicAnchors = map[string]*Schema{}
				}
				sch.dynamicAnchors[string(anchor)] = danchorSch
			}
		}
	}

	switch v := v.(type) {
	case bool:
		// boolean schema: true accepts everything, false rejects everything
		sch.Bool = &v
	case map[string]any:
		if err := c.compileObject(v, sch, r, q); err != nil {
			return err
		}
	}

	// precompute evaluation bookkeeping used by unevaluatedProperties/Items
	sch.allPropsEvaluated = sch.AdditionalProperties != nil
	if sch.DraftVersion < 2020 {
		// pre-2020: "items" may be a single schema or a tuple of schemas
		sch.allItemsEvaluated = sch.AdditionalItems != nil
		switch items := sch.Items.(type) {
		case *Schema:
			sch.allItemsEvaluated = true
		case []*Schema:
			sch.numItemsEvaluated = len(items)
		}
	} else {
		// 2020-12: tuple form moved to "prefixItems"
		sch.allItemsEvaluated = sch.Items2020 != nil
		sch.numItemsEvaluated = len(sch.PrefixItems)
	}

	return nil
}
|
||||
|
||||
// compileObject compiles a schema given as a json object.
// An empty object is equivalent to the boolean schema true.
func (c *Compiler) compileObject(obj map[string]any, sch *Schema, r *root, q *queue) error {
	if len(obj) == 0 {
		b := true
		sch.Bool = &b
		return nil
	}
	// delegate keyword-by-keyword compilation to objCompiler
	oc := objCompiler{
		c:   c,
		obj: obj,
		up:  sch.up,
		r:   r,
		res: r.resource(sch.up.ptr),
		q:   q,
	}
	return oc.compile(sch)
}
|
||||
|
||||
// queue --

// queue is the ordered list of schemas awaiting compilation
// within a single doCompile run.
type queue []*Schema

// append adds sch to the end of the queue.
func (q *queue) append(sch *Schema) {
	*q = append(*q, sch)
}

// at returns the i-th queued schema.
func (q *queue) at(i int) *Schema {
	return (*q)[i]
}

// len returns the number of queued schemas.
func (q *queue) len() int {
	return len(*q)
}

// get returns the queued schema at location up, or nil if none is queued.
func (q *queue) get(up urlPtr) *Schema {
	i := slices.IndexFunc(*q, func(sch *Schema) bool { return sch.up == up })
	if i != -1 {
		return (*q)[i]
	}
	return nil
}
|
||||
|
||||
// regexp --

// Regexp is the representation of compiled regular expression.
type Regexp interface {
	fmt.Stringer

	// MatchString reports whether the string s contains
	// any match of the regular expression.
	MatchString(string) bool
}

// RegexpEngine parses a regular expression and returns,
// if successful, a Regexp object that can be used to
// match against text.
type RegexpEngine func(string) (Regexp, error)

// validate reports whether v, when it is a string, parses as a regular
// expression under this engine. Non-string values pass unchanged,
// since the "pattern" keyword only constrains strings.
func (re RegexpEngine) validate(v any) error {
	pattern, isString := v.(string)
	if !isString {
		return nil
	}
	if _, err := re(pattern); err != nil {
		return err
	}
	return nil
}

// goRegexpCompile is the default RegexpEngine, backed by the
// standard library regexp package.
func goRegexpCompile(s string) (Regexp, error) {
	return regexp.Compile(s)
}
|
||||
51
vendor/github.com/santhosh-tekuri/jsonschema/v6/content.go
generated
vendored
Normal file
51
vendor/github.com/santhosh-tekuri/jsonschema/v6/content.go
generated
vendored
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// Decoder specifies how to decode specific contentEncoding.
type Decoder struct {
	// Name of contentEncoding.
	Name string
	// Decode given string to byte array.
	Decode func(string) ([]byte, error)
}

// decoders holds the built-in contentEncoding decoders.
// Only "base64" is supported out of the box; users can add more
// via Compiler.RegisterContentEncoding.
var decoders = map[string]*Decoder{
	"base64": {
		Name: "base64",
		Decode: func(s string) ([]byte, error) {
			return base64.StdEncoding.DecodeString(s)
		},
	},
}

// MediaType specified how to validate bytes against specific contentMediaType.
type MediaType struct {
	// Name of contentMediaType.
	Name string

	// Validate checks whether bytes conform to this mediatype.
	Validate func([]byte) error

	// UnmarshalJSON unmarshals bytes into json value.
	// This must be nil if this mediatype is not compatible
	// with json.
	UnmarshalJSON func([]byte) (any, error)
}

// mediaTypes holds the built-in contentMediaType validators.
// Only "application/json" is supported out of the box; users can
// add more via Compiler.RegisterContentMediaType.
var mediaTypes = map[string]*MediaType{
	"application/json": {
		Name: "application/json",
		Validate: func(b []byte) error {
			var v any
			return json.Unmarshal(b, &v)
		},
		UnmarshalJSON: func(b []byte) (any, error) {
			return UnmarshalJSON(bytes.NewReader(b))
		},
	},
}
|
||||
360
vendor/github.com/santhosh-tekuri/jsonschema/v6/draft.go
generated
vendored
Normal file
360
vendor/github.com/santhosh-tekuri/jsonschema/v6/draft.go
generated
vendored
Normal file
|
|
@ -0,0 +1,360 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Draft represents json-schema specification.
type Draft struct {
	version       int                // numeric version: 4, 6, 7, 2019, 2020
	url           string             // canonical specification url
	sch           *Schema            // compiled meta-schema (set in init)
	id            string             // property name used to represent id
	subschemas    []SchemaPath       // locations of subschemas
	vocabPrefix   string             // prefix used for vocabulary
	allVocabs     map[string]*Schema // names of supported vocabs with its schemas
	defaultVocabs []string           // names of default vocabs
}

// String returns the specification url.
func (d *Draft) String() string {
	return d.url
}
|
||||
|
||||
// Supported json-schema drafts. Each later draft inherits the earlier
// draft's subschema locations and adds its own keywords.
var (
	Draft4 = &Draft{
		version: 4,
		url:     "http://json-schema.org/draft-04/schema",
		id:      "id",
		subschemas: []SchemaPath{
			// type agnostic
			schemaPath("definitions/*"),
			schemaPath("not"),
			schemaPath("allOf/[]"),
			schemaPath("anyOf/[]"),
			schemaPath("oneOf/[]"),
			// object
			schemaPath("properties/*"),
			schemaPath("additionalProperties"),
			schemaPath("patternProperties/*"),
			// array
			schemaPath("items"),
			schemaPath("items/[]"),
			schemaPath("additionalItems"),
			schemaPath("dependencies/*"),
		},
		vocabPrefix:   "",
		allVocabs:     map[string]*Schema{},
		defaultVocabs: []string{},
	}

	Draft6 = &Draft{
		version: 6,
		url:     "http://json-schema.org/draft-06/schema",
		id:      "$id",
		subschemas: joinSubschemas(Draft4.subschemas,
			schemaPath("propertyNames"),
			schemaPath("contains"),
		),
		vocabPrefix:   "",
		allVocabs:     map[string]*Schema{},
		defaultVocabs: []string{},
	}

	Draft7 = &Draft{
		version: 7,
		url:     "http://json-schema.org/draft-07/schema",
		id:      "$id",
		subschemas: joinSubschemas(Draft6.subschemas,
			schemaPath("if"),
			schemaPath("then"),
			schemaPath("else"),
		),
		vocabPrefix:   "",
		allVocabs:     map[string]*Schema{},
		defaultVocabs: []string{},
	}

	Draft2019 = &Draft{
		version: 2019,
		url:     "https://json-schema.org/draft/2019-09/schema",
		id:      "$id",
		subschemas: joinSubschemas(Draft7.subschemas,
			schemaPath("$defs/*"),
			schemaPath("dependentSchemas/*"),
			schemaPath("unevaluatedProperties"),
			schemaPath("unevaluatedItems"),
			schemaPath("contentSchema"),
		),
		vocabPrefix: "https://json-schema.org/draft/2019-09/vocab/",
		// vocab schemas are nil here; init() compiles and fills them in
		allVocabs: map[string]*Schema{
			"core":       nil,
			"applicator": nil,
			"validation": nil,
			"meta-data":  nil,
			"format":     nil,
			"content":    nil,
		},
		defaultVocabs: []string{"core", "applicator", "validation"},
	}

	Draft2020 = &Draft{
		version: 2020,
		url:     "https://json-schema.org/draft/2020-12/schema",
		id:      "$id",
		subschemas: joinSubschemas(Draft2019.subschemas,
			schemaPath("prefixItems/[]"),
		),
		vocabPrefix: "https://json-schema.org/draft/2020-12/vocab/",
		// vocab schemas are nil here; init() compiles and fills them in
		allVocabs: map[string]*Schema{
			"core":              nil,
			"applicator":        nil,
			"unevaluated":       nil,
			"validation":        nil,
			"meta-data":         nil,
			"format-annotation": nil,
			"format-assertion":  nil,
			"content":           nil,
		},
		defaultVocabs: []string{"core", "applicator", "unevaluated", "validation"},
	}

	// draftLatest is used for schemas without an explicit draft
	// (e.g. the "json-schema.org/schema" alias).
	draftLatest = Draft2020
)
|
||||
|
||||
// init compiles the meta-schema of every supported draft, including the
// per-vocabulary meta-schemas of 2019-09 and 2020-12, so they are ready
// before any user compilation happens.
func init() {
	c := NewCompiler()
	c.AssertFormat()
	for _, d := range []*Draft{Draft4, Draft6, Draft7, Draft2019, Draft2020} {
		d.sch = c.MustCompile(d.url)
		for name := range d.allVocabs {
			// e.g. ".../2020-12/schema" -> ".../2020-12/meta/core"
			d.allVocabs[name] = c.MustCompile(strings.TrimSuffix(d.url, "schema") + "meta/" + name)
		}
	}
}
|
||||
|
||||
// draftFromURL returns the Draft for a $schema url, accepting both
// http and https schemes. It returns nil for unknown urls or urls
// carrying a fragment.
func draftFromURL(url string) *Draft {
	u, frag := split(url)
	if frag != "" {
		return nil
	}
	// normalize scheme away so http and https compare equal
	u, ok := strings.CutPrefix(u, "http://")
	if !ok {
		u, _ = strings.CutPrefix(u, "https://")
	}
	switch u {
	case "json-schema.org/schema":
		// unversioned alias always points at the latest draft
		return draftLatest
	case "json-schema.org/draft/2020-12/schema":
		return Draft2020
	case "json-schema.org/draft/2019-09/schema":
		return Draft2019
	case "json-schema.org/draft-07/schema":
		return Draft7
	case "json-schema.org/draft-06/schema":
		return Draft6
	case "json-schema.org/draft-04/schema":
		return Draft4
	default:
		return nil
	}
}
|
||||
|
||||
// getID extracts the resource id ("id" or "$id" depending on draft)
// from the schema object obj, without any fragment. It returns ""
// when no usable id is present.
func (d *Draft) getID(obj map[string]any) string {
	if d.version < 2019 {
		if _, ok := obj["$ref"]; ok {
			// All other properties in a "$ref" object MUST be ignored
			return ""
		}
	}

	id, ok := strVal(obj, d.id)
	if !ok {
		return ""
	}
	id, _ = split(id) // ignore fragment
	return id
}
|
||||
|
||||
// getVocabs returns the vocabularies required by the metaschema doc at url.
// Standard vocabularies are returned by their short name (e.g. "core"),
// user-registered ones by their full url. A nil slice means the draft's
// defaults apply. It fails with *UnsupportedVocabularyError when a required
// vocabulary is neither standard nor registered in vocabularies.
func (d *Draft) getVocabs(url url, doc any, vocabularies map[string]*Vocabulary) ([]string, error) {
	// $vocabulary exists only since draft 2019-09
	if d.version < 2019 {
		return nil, nil
	}
	obj, ok := doc.(map[string]any)
	if !ok {
		return nil, nil
	}
	v, ok := obj["$vocabulary"]
	if !ok {
		return nil, nil
	}
	obj, ok = v.(map[string]any)
	if !ok {
		return nil, nil
	}

	var vocabs []string
	for vocab, reqd := range obj {
		// only vocabularies marked `true` (required) are collected
		if reqd, ok := reqd.(bool); !ok || !reqd {
			continue
		}
		name, ok := strings.CutPrefix(vocab, d.vocabPrefix)
		if ok {
			// standard vocabulary of this draft: record the short name
			if _, ok := d.allVocabs[name]; ok {
				if !slices.Contains(vocabs, name) {
					vocabs = append(vocabs, name)
					continue
				}
			}
		}
		// otherwise it must be a user-registered vocabulary
		if _, ok := vocabularies[vocab]; !ok {
			return nil, &UnsupportedVocabularyError{url.String(), vocab}
		}
		if !slices.Contains(vocabs, vocab) {
			vocabs = append(vocabs, vocab)
		}
	}
	// the core vocabulary is always in effect
	if !slices.Contains(vocabs, "core") {
		vocabs = append(vocabs, "core")
	}
	return vocabs, nil
}
|
||||
|
||||
// --
|
||||
|
||||
// dialect is a draft together with the set of vocabularies a
// metaschema has switched on via $vocabulary.
type dialect struct {
	draft  *Draft
	vocabs []string // nil means use draft.defaultVocabs
}

// hasVocab reports whether the named vocabulary is active in this dialect.
// "core" and all pre-2019 drafts are always active.
func (d *dialect) hasVocab(name string) bool {
	if name == "core" || d.draft.version < 2019 {
		return true
	}
	if d.vocabs != nil {
		return slices.Contains(d.vocabs, name)
	}
	return slices.Contains(d.draft.defaultVocabs, name)
}

// activeVocabs returns the vocabularies in effect, merging in the
// user-registered vocabularies when assertVocabs is set (or the draft
// predates $vocabulary). A nil result means "draft defaults".
func (d *dialect) activeVocabs(assertVocabs bool, vocabularies map[string]*Vocabulary) []string {
	if len(vocabularies) == 0 {
		return d.vocabs
	}
	// pre-2019 drafts have no $vocabulary switch, so user vocabularies
	// are always asserted there
	if d.draft.version < 2019 {
		assertVocabs = true
	}
	if !assertVocabs {
		return d.vocabs
	}
	var vocabs []string
	if d.vocabs == nil {
		vocabs = slices.Clone(d.draft.defaultVocabs)
	} else {
		vocabs = slices.Clone(d.vocabs)
	}
	for vocab := range vocabularies {
		if !slices.Contains(vocabs, vocab) {
			vocabs = append(vocabs, vocab)
		}
	}
	return vocabs
}

// getSchema returns the meta-schema to validate schemas written in
// this dialect: either the draft's stock meta-schema, or a synthetic
// allOf over the active vocabulary schemas.
func (d *dialect) getSchema(assertVocabs bool, vocabularies map[string]*Vocabulary) *Schema {
	vocabs := d.activeVocabs(assertVocabs, vocabularies)
	if vocabs == nil {
		return d.draft.sch
	}

	var allOf []*Schema
	for _, vocab := range vocabs {
		sch := d.draft.allVocabs[vocab]
		if sch == nil {
			// not a standard vocab: look it up among user-registered ones
			if v, ok := vocabularies[vocab]; ok {
				sch = v.Schema
			}
		}
		if sch != nil {
			allOf = append(allOf, sch)
		}
	}
	// core semantics must always be enforced
	if !slices.Contains(vocabs, "core") {
		sch := d.draft.allVocabs["core"]
		if sch == nil {
			sch = d.draft.sch
		}
		allOf = append(allOf, sch)
	}
	// synthetic in-memory meta-schema combining the selected vocabularies
	sch := &Schema{
		Location:     "urn:mem:metaschema",
		up:           urlPtr{url("urn:mem:metaschema"), ""},
		DraftVersion: d.draft.version,
		AllOf:        allOf,
	}
	sch.resource = sch
	if sch.DraftVersion >= 2020 {
		// mirror the "meta" dynamic anchor the stock 2020-12 metaschema has
		sch.DynamicAnchor = "meta"
		sch.dynamicAnchors = map[string]*Schema{
			"meta": sch,
		}
	}
	return sch
}
|
||||
|
||||
// --
|
||||
|
||||
// ParseIDError is returned when the id of a schema resource
// cannot be parsed as a url.
type ParseIDError struct {
	URL string
}

func (e *ParseIDError) Error() string {
	return fmt.Sprintf("error in parsing id at %q", e.URL)
}

// --

// ParseAnchorError is returned when an anchor in a schema resource
// cannot be parsed.
type ParseAnchorError struct {
	URL string
}

func (e *ParseAnchorError) Error() string {
	return fmt.Sprintf("error in parsing anchor at %q", e.URL)
}

// --

// DuplicateIDError is returned when the same id is declared at two
// different locations (Ptr1, Ptr2) within one schema resource.
type DuplicateIDError struct {
	ID   string
	URL  string
	Ptr1 string
	Ptr2 string
}

func (e *DuplicateIDError) Error() string {
	return fmt.Sprintf("duplicate id %q in %q at %q and %q", e.ID, e.URL, e.Ptr1, e.Ptr2)
}

// --

// DuplicateAnchorError is returned when the same anchor is declared at
// two different locations (Ptr1, Ptr2) within one schema resource.
type DuplicateAnchorError struct {
	Anchor string
	URL    string
	Ptr1   string
	Ptr2   string
}

func (e *DuplicateAnchorError) Error() string {
	return fmt.Sprintf("duplicate anchor %q in %q at %q and %q", e.Anchor, e.URL, e.Ptr1, e.Ptr2)
}
|
||||
|
||||
// --
|
||||
|
||||
func joinSubschemas(a1 []SchemaPath, a2 ...SchemaPath) []SchemaPath {
|
||||
var a []SchemaPath
|
||||
a = append(a, a1...)
|
||||
a = append(a, a2...)
|
||||
return a
|
||||
}
|
||||
708
vendor/github.com/santhosh-tekuri/jsonschema/v6/format.go
generated
vendored
Normal file
708
vendor/github.com/santhosh-tekuri/jsonschema/v6/format.go
generated
vendored
Normal file
|
|
@ -0,0 +1,708 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"net/netip"
|
||||
gourl "net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Format defined specific format.
type Format struct {
	// Name of format.
	Name string

	// Validate checks if given value is of this format.
	Validate func(v any) error
}

// formats holds the built-in format validators keyed by format name.
// Every validator ignores values that are not strings, as the format
// keyword only applies to strings.
var formats = map[string]*Format{
	"json-pointer":          {"json-pointer", validateJSONPointer},
	"relative-json-pointer": {"relative-json-pointer", validateRelativeJSONPointer},
	"uuid":                  {"uuid", validateUUID},
	"duration":              {"duration", validateDuration},
	"period":                {"period", validatePeriod},
	"ipv4":                  {"ipv4", validateIPV4},
	"ipv6":                  {"ipv6", validateIPV6},
	"hostname":              {"hostname", validateHostname},
	"email":                 {"email", validateEmail},
	"date":                  {"date", validateDate},
	"time":                  {"time", validateTime},
	"date-time":             {"date-time", validateDateTime},
	"uri":                   {"uri", validateURI},
	"iri":                   {"iri", validateURI},
	"uri-reference":         {"uri-reference", validateURIReference},
	"iri-reference":         {"iri-reference", validateURIReference},
	"uri-template":          {"uri-template", validateURITemplate},
	"semver":                {"semver", validateSemver},
}
|
||||
|
||||
// see https://www.rfc-editor.org/rfc/rfc6901#section-3
// validateJSONPointer checks the json-pointer ABNF: empty string, or
// "/"-prefixed reference tokens in which "~" may only appear as the
// escapes "~0" and "~1". Non-string values are ignored.
func validateJSONPointer(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}
	// the empty pointer refers to the whole document
	if s == "" {
		return nil
	}
	if !strings.HasPrefix(s, "/") {
		return LocalizableError("not starting with /")
	}
	for _, tok := range strings.Split(s, "/")[1:] {
		// escape tracks whether the previous rune was '~'
		escape := false
		for _, ch := range tok {
			if escape {
				escape = false
				if ch != '0' && ch != '1' {
					return LocalizableError("~ must be followed by 0 or 1")
				}
				continue
			}
			if ch == '~' {
				escape = true
				continue
			}
			// unescaped = %x00-2E / %x30-7D / %x7F-10FFFF
			// (everything except '/' (0x2F) and '~' (0x7E))
			switch {
			case ch >= '\x00' && ch <= '\x2E':
			case ch >= '\x30' && ch <= '\x7D':
			case ch >= '\x7F' && ch <= '\U0010FFFF':
			default:
				return LocalizableError("invalid character %q", ch)
			}
		}
		// a token may not end with a dangling '~'
		if escape {
			return LocalizableError("~ must be followed by 0 or 1")
		}
	}
	return nil
}
|
||||
|
||||
// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
// validateRelativeJSONPointer checks a non-negative integer (without
// leading zeros) followed by either "#" or a json-pointer.
// Non-string values are ignored.
func validateRelativeJSONPointer(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}

	// start with non-negative-integer
	numDigits := 0
	for _, ch := range s {
		if ch >= '0' && ch <= '9' {
			numDigits++
		} else {
			break
		}
	}
	if numDigits == 0 {
		return LocalizableError("must start with non-negative integer")
	}
	// "0" is valid, but multi-digit numbers may not start with zero
	if numDigits > 1 && strings.HasPrefix(s, "0") {
		return LocalizableError("starts with zero")
	}
	s = s[numDigits:]

	// followed by either json-pointer or '#'
	if s == "#" {
		return nil
	}
	return validateJSONPointer(s)
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc4122#page-4
// validateUUID checks the 8-4-4-4-12 hex-digit layout of RFC 4122.
// Non-string values are ignored.
func validateUUID(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}

	// lengths of the five dash-separated hex groups
	hexGroups := []int{8, 4, 4, 4, 12}
	groups := strings.Split(s, "-")
	if len(groups) != len(hexGroups) {
		return LocalizableError("must have %d elements", len(hexGroups))
	}
	for i, group := range groups {
		if len(group) != hexGroups[i] {
			return LocalizableError("element %d must be %d characters long", i+1, hexGroups[i])
		}
		for _, ch := range group {
			switch {
			case ch >= '0' && ch <= '9':
			case ch >= 'a' && ch <= 'f':
			case ch >= 'A' && ch <= 'F':
			default:
				return LocalizableError("non-hex character %q", ch)
			}
		}
	}
	return nil
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A
// validateDuration checks the ISO-8601 duration grammar: "P" followed
// either by a week form ("nW") or by date units (Y, M, D) and, after an
// optional "T", time units (H, M, S), each unit appearing at most once
// and in order. Non-string values are ignored.
func validateDuration(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}

	// must start with 'P'
	s, ok = strings.CutPrefix(s, "P")
	if !ok {
		return LocalizableError("must start with P")
	}
	if s == "" {
		return LocalizableError("nothing after P")
	}

	// dur-week: "nW" cannot be combined with other units
	if s, ok := strings.CutSuffix(s, "W"); ok {
		if s == "" {
			return LocalizableError("no number in week")
		}
		for _, ch := range s {
			if ch < '0' || ch > '9' {
				return LocalizableError("invalid week")
			}
		}
		return nil
	}

	// date units before the 'T', time units after it
	allUnits := []string{"YMD", "HMS"}
	for i, s := range strings.Split(s, "T") {
		if i != 0 && s == "" {
			return LocalizableError("no time elements")
		}
		if i >= len(allUnits) {
			return LocalizableError("more than one T")
		}
		// units shrinks as units are consumed, enforcing order and uniqueness
		units := allUnits[i]
		for s != "" {
			digitCount := 0
			for _, ch := range s {
				if ch >= '0' && ch <= '9' {
					digitCount++
				} else {
					break
				}
			}
			if digitCount == 0 {
				return LocalizableError("missing number")
			}
			s = s[digitCount:]
			if s == "" {
				return LocalizableError("missing unit")
			}
			unit := s[0]
			j := strings.IndexByte(units, unit)
			if j == -1 {
				// distinguish "valid unit, wrong position" from "unknown unit"
				if strings.IndexByte(allUnits[i], unit) != -1 {
					return LocalizableError("unit %q out of order", unit)
				}
				return LocalizableError("invalid unit %q", unit)
			}
			units = units[j+1:]
			s = s[1:]
		}
	}

	return nil
}
|
||||
|
||||
func validateIPV4(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
groups := strings.Split(s, ".")
|
||||
if len(groups) != 4 {
|
||||
return LocalizableError("expected four decimals")
|
||||
}
|
||||
for _, group := range groups {
|
||||
if len(group) > 1 && group[0] == '0' {
|
||||
return LocalizableError("leading zeros")
|
||||
}
|
||||
n, err := strconv.Atoi(group)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if n < 0 || n > 255 {
|
||||
return LocalizableError("decimal must be between 0 and 255")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateIPV6 checks that v parses as an IPv6 address without a zone id.
// Non-string values are ignored (format applies to strings only).
func validateIPV6(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}
	// netip.ParseAddr also accepts plain IPv4; require a colon so that
	// dotted-quad strings are rejected here.
	if !strings.Contains(s, ":") {
		return LocalizableError("missing colon")
	}
	addr, err := netip.ParseAddr(s)
	if err != nil {
		return err
	}
	// netip accepts zoned addresses like "fe80::1%eth0"; the format does not
	if addr.Zone() != "" {
		return LocalizableError("zone id is not a part of ipv6 address")
	}
	return nil
}
|
||||
|
||||
// see https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
// validateHostname checks hostname restrictions: at most 253 characters
// overall (ignoring one trailing dot), dot-separated labels of 1-63
// characters drawn from letters, digits, and interior hyphens.
// Non-string values are ignored.
func validateHostname(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}

	// entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
	s = strings.TrimSuffix(s, ".")
	if len(s) > 253 {
		return LocalizableError("more than 253 characters long")
	}

	// Hostnames are composed of series of labels concatenated with dots, as are all domain names
	for _, label := range strings.Split(s, ".") {
		// Each label must be from 1 to 63 characters long
		if len(label) < 1 || len(label) > 63 {
			return LocalizableError("label must be 1 to 63 characters long")
		}

		// labels must not start or end with a hyphen
		if strings.HasPrefix(label, "-") {
			return LocalizableError("label starts with hyphen")
		}
		if strings.HasSuffix(label, "-") {
			return LocalizableError("label ends with hyphen")
		}

		// labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
		// the digits '0' through '9', and the hyphen ('-')
		for _, ch := range label {
			switch {
			case ch >= 'a' && ch <= 'z':
			case ch >= 'A' && ch <= 'Z':
			case ch >= '0' && ch <= '9':
			case ch == '-':
			default:
				return LocalizableError("invalid character %q", ch)
			}
		}
	}
	return nil
}
|
||||
|
||||
// see https://en.wikipedia.org/wiki/Email_address
|
||||
func validateEmail(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
// entire email address to be no more than 254 characters long
|
||||
if len(s) > 254 {
|
||||
return LocalizableError("more than 255 characters long")
|
||||
}
|
||||
|
||||
// email address is generally recognized as having two parts joined with an at-sign
|
||||
at := strings.LastIndexByte(s, '@')
|
||||
if at == -1 {
|
||||
return LocalizableError("missing @")
|
||||
}
|
||||
local, domain := s[:at], s[at+1:]
|
||||
|
||||
// local part may be up to 64 characters long
|
||||
if len(local) > 64 {
|
||||
return LocalizableError("local part more than 64 characters long")
|
||||
}
|
||||
|
||||
if len(local) > 1 && strings.HasPrefix(local, `"`) && strings.HasPrefix(local, `"`) {
|
||||
// quoted
|
||||
local := local[1 : len(local)-1]
|
||||
if strings.IndexByte(local, '\\') != -1 || strings.IndexByte(local, '"') != -1 {
|
||||
return LocalizableError("backslash and quote are not allowed within quoted local part")
|
||||
}
|
||||
} else {
|
||||
// unquoted
|
||||
if strings.HasPrefix(local, ".") {
|
||||
return LocalizableError("starts with dot")
|
||||
}
|
||||
if strings.HasSuffix(local, ".") {
|
||||
return LocalizableError("ends with dot")
|
||||
}
|
||||
|
||||
// consecutive dots not allowed
|
||||
if strings.Contains(local, "..") {
|
||||
return LocalizableError("consecutive dots")
|
||||
}
|
||||
|
||||
// check allowed chars
|
||||
for _, ch := range local {
|
||||
switch {
|
||||
case ch >= 'a' && ch <= 'z':
|
||||
case ch >= 'A' && ch <= 'Z':
|
||||
case ch >= '0' && ch <= '9':
|
||||
case strings.ContainsRune(".!#$%&'*+-/=?^_`{|}~", ch):
|
||||
default:
|
||||
return LocalizableError("invalid character %q", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// domain if enclosed in brackets, must match an IP address
|
||||
if strings.HasPrefix(domain, "[") && strings.HasSuffix(domain, "]") {
|
||||
domain = domain[1 : len(domain)-1]
|
||||
if rem, ok := strings.CutPrefix(domain, "IPv6:"); ok {
|
||||
if err := validateIPV6(rem); err != nil {
|
||||
return LocalizableError("invalid ipv6 address: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := validateIPV4(domain); err != nil {
|
||||
return LocalizableError("invalid ipv4 address: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// domain must match the requirements for a hostname
|
||||
if err := validateHostname(domain); err != nil {
|
||||
return LocalizableError("invalid domain: %v", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
// validateDate checks the full-date production (yyyy-mm-dd) of RFC 3339.
// Non-string values are ignored (format applies to strings only).
func validateDate(v any) error {
	str, isString := v.(string)
	if !isString {
		return nil
	}
	if _, err := time.Parse("2006-01-02", str); err != nil {
		return err
	}
	return nil
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
// NOTE: golang time package does not support leap seconds.
// validateTime checks the full-time production: hh:mm:ss with optional
// fractional seconds, followed by "Z"/"z" or a ±hh:mm offset. A leap
// second (ss == 60) is only accepted at 23:59 UTC after applying the
// offset. Non-string values are ignored.
func validateTime(v any) error {
	str, ok := v.(string)
	if !ok {
		return nil
	}

	// min: hh:mm:ssZ
	if len(str) < 9 {
		return LocalizableError("less than 9 characters long")
	}
	if str[2] != ':' || str[5] != ':' {
		return LocalizableError("missing colon in correct place")
	}

	// parse hh:mm:ss
	var hms []int
	for _, tok := range strings.SplitN(str[:8], ":", 3) {
		i, err := strconv.Atoi(tok)
		if err != nil {
			return LocalizableError("invalid hour/min/sec")
		}
		if i < 0 {
			return LocalizableError("non-positive hour/min/sec")
		}
		hms = append(hms, i)
	}
	if len(hms) != 3 {
		return LocalizableError("missing hour/min/sec")
	}
	h, m, s := hms[0], hms[1], hms[2]
	// second 60 is provisionally allowed here for leap seconds,
	// then narrowed to 23:59 UTC below
	if h > 23 || m > 59 || s > 60 {
		return LocalizableError("hour/min/sec out of range")
	}
	str = str[8:]

	// parse sec-frac if present
	if rem, ok := strings.CutPrefix(str, "."); ok {
		numDigits := 0
		for _, ch := range rem {
			if ch >= '0' && ch <= '9' {
				numDigits++
			} else {
				break
			}
		}
		if numDigits == 0 {
			return LocalizableError("no digits in second fraction")
		}
		str = rem[numDigits:]
	}

	if str != "z" && str != "Z" {
		// parse time-numoffset
		if len(str) != 6 {
			return LocalizableError("offset must be 6 characters long")
		}
		// sign is applied inverted: converting local time back to UTC
		var sign int
		switch str[0] {
		case '+':
			sign = -1
		case '-':
			sign = +1
		default:
			return LocalizableError("offset must begin with plus/minus")
		}
		str = str[1:]
		if str[2] != ':' {
			return LocalizableError("missing colon in offset in correct place")
		}

		var zhm []int
		for _, tok := range strings.SplitN(str, ":", 2) {
			i, err := strconv.Atoi(tok)
			if err != nil {
				return LocalizableError("invalid hour/min in offset")
			}
			if i < 0 {
				return LocalizableError("non-positive hour/min in offset")
			}
			zhm = append(zhm, i)
		}
		zh, zm := zhm[0], zhm[1]
		if zh > 23 || zm > 59 {
			return LocalizableError("hour/min in offset out of range")
		}

		// apply timezone: normalize h:m to UTC, wrapping below 00:00
		hm := (h*60 + m) + sign*(zh*60+zm)
		if hm < 0 {
			hm += 24 * 60
		}
		h, m = hm/60, hm%60
	}

	// check leap second: only valid at 23:59 UTC
	if s >= 60 && (h != 23 || m != 59) {
		return LocalizableError("invalid leap second")
	}

	return nil
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
|
||||
func validateDateTime(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// min: yyyy-mm-ddThh:mm:ssZ
|
||||
if len(s) < 20 {
|
||||
return LocalizableError("less than 20 characters long")
|
||||
}
|
||||
|
||||
if s[10] != 't' && s[10] != 'T' {
|
||||
return LocalizableError("11th character must be t or T")
|
||||
}
|
||||
if err := validateDate(s[:10]); err != nil {
|
||||
return LocalizableError("invalid date element: %v", err)
|
||||
}
|
||||
if err := validateTime(s[11:]); err != nil {
|
||||
return LocalizableError("invalid time element: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseURL(s string) (*gourl.URL, error) {
|
||||
u, err := gourl.Parse(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// gourl does not validate ipv6 host address
|
||||
hostName := u.Hostname()
|
||||
if strings.Contains(hostName, ":") {
|
||||
if !strings.Contains(u.Host, "[") || !strings.Contains(u.Host, "]") {
|
||||
return nil, LocalizableError("ipv6 address not enclosed in brackets")
|
||||
}
|
||||
if err := validateIPV6(hostName); err != nil {
|
||||
return nil, LocalizableError("invalid ipv6 address: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return u, nil
|
||||
}
|
||||
|
||||
func validateURI(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
u, err := parseURL(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !u.IsAbs() {
|
||||
return LocalizableError("relative url")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateURIReference(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if strings.Contains(s, `\`) {
|
||||
return LocalizableError(`contains \`)
|
||||
}
|
||||
_, err := parseURL(s)
|
||||
return err
|
||||
}
|
||||
|
||||
func validateURITemplate(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
u, err := parseURL(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, tok := range strings.Split(u.RawPath, "/") {
|
||||
tok, err = decode(tok)
|
||||
if err != nil {
|
||||
return LocalizableError("percent decode failed: %v", err)
|
||||
}
|
||||
want := true
|
||||
for _, ch := range tok {
|
||||
var got bool
|
||||
switch ch {
|
||||
case '{':
|
||||
got = true
|
||||
case '}':
|
||||
got = false
|
||||
default:
|
||||
continue
|
||||
}
|
||||
if got != want {
|
||||
return LocalizableError("nested curly braces")
|
||||
}
|
||||
want = !want
|
||||
}
|
||||
if !want {
|
||||
return LocalizableError("no matching closing brace")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validatePeriod(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
slash := strings.IndexByte(s, '/')
|
||||
if slash == -1 {
|
||||
return LocalizableError("missing slash")
|
||||
}
|
||||
|
||||
start, end := s[:slash], s[slash+1:]
|
||||
if strings.HasPrefix(start, "P") {
|
||||
if err := validateDuration(start); err != nil {
|
||||
return LocalizableError("invalid start duration: %v", err)
|
||||
}
|
||||
if err := validateDateTime(end); err != nil {
|
||||
return LocalizableError("invalid end date-time: %v", err)
|
||||
}
|
||||
} else {
|
||||
if err := validateDateTime(start); err != nil {
|
||||
return LocalizableError("invalid start date-time: %v", err)
|
||||
}
|
||||
if strings.HasPrefix(end, "P") {
|
||||
if err := validateDuration(end); err != nil {
|
||||
return LocalizableError("invalid end duration: %v", err)
|
||||
}
|
||||
} else if err := validateDateTime(end); err != nil {
|
||||
return LocalizableError("invalid end date-time: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://semver.org/#backusnaur-form-grammar-for-valid-semver-versions
|
||||
func validateSemver(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// build --
|
||||
if i := strings.IndexByte(s, '+'); i != -1 {
|
||||
build := s[i+1:]
|
||||
if build == "" {
|
||||
return LocalizableError("build is empty")
|
||||
}
|
||||
for _, buildID := range strings.Split(build, ".") {
|
||||
if buildID == "" {
|
||||
return LocalizableError("build identifier is empty")
|
||||
}
|
||||
for _, ch := range buildID {
|
||||
switch {
|
||||
case ch >= '0' && ch <= '9':
|
||||
case (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '-':
|
||||
default:
|
||||
return LocalizableError("invalid character %q in build identifier", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
s = s[:i]
|
||||
}
|
||||
|
||||
// pre-release --
|
||||
if i := strings.IndexByte(s, '-'); i != -1 {
|
||||
preRelease := s[i+1:]
|
||||
for _, preReleaseID := range strings.Split(preRelease, ".") {
|
||||
if preReleaseID == "" {
|
||||
return LocalizableError("pre-release identifier is empty")
|
||||
}
|
||||
allDigits := true
|
||||
for _, ch := range preReleaseID {
|
||||
switch {
|
||||
case ch >= '0' && ch <= '9':
|
||||
case (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '-':
|
||||
allDigits = false
|
||||
default:
|
||||
return LocalizableError("invalid character %q in pre-release identifier", ch)
|
||||
}
|
||||
}
|
||||
if allDigits && len(preReleaseID) > 1 && preReleaseID[0] == '0' {
|
||||
return LocalizableError("pre-release numeric identifier starts with zero")
|
||||
}
|
||||
}
|
||||
s = s[:i]
|
||||
}
|
||||
|
||||
// versionCore --
|
||||
versions := strings.Split(s, ".")
|
||||
if len(versions) != 3 {
|
||||
return LocalizableError("versionCore must have 3 numbers separated by dot")
|
||||
}
|
||||
names := []string{"major", "minor", "patch"}
|
||||
for i, version := range versions {
|
||||
if version == "" {
|
||||
return LocalizableError("%s is empty", names[i])
|
||||
}
|
||||
if len(version) > 1 && version[0] == '0' {
|
||||
return LocalizableError("%s starts with zero", names[i])
|
||||
}
|
||||
for _, ch := range version {
|
||||
if ch < '0' || ch > '9' {
|
||||
return LocalizableError("%s contains non-digit", names[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
8
vendor/github.com/santhosh-tekuri/jsonschema/v6/go.work
generated
vendored
Normal file
8
vendor/github.com/santhosh-tekuri/jsonschema/v6/go.work
generated
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
go 1.21.1
|
||||
|
||||
use (
|
||||
.
|
||||
./cmd/jv
|
||||
)
|
||||
|
||||
replace github.com/santhosh-tekuri/jsonschema/v6 v6.0.0 => ./
|
||||
651
vendor/github.com/santhosh-tekuri/jsonschema/v6/kind/kind.go
generated
vendored
Normal file
651
vendor/github.com/santhosh-tekuri/jsonschema/v6/kind/kind.go
generated
vendored
Normal file
|
|
@ -0,0 +1,651 @@
|
|||
package kind
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/big"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
// --
|
||||
|
||||
type InvalidJsonValue struct {
|
||||
Value any
|
||||
}
|
||||
|
||||
func (*InvalidJsonValue) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (k *InvalidJsonValue) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("invalid jsonType %T", k.Value)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Schema struct {
|
||||
Location string
|
||||
}
|
||||
|
||||
func (*Schema) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (k *Schema) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("jsonschema validation failed with %s", quote(k.Location))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Group struct{}
|
||||
|
||||
func (*Group) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*Group) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("validation failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Not struct{}
|
||||
|
||||
func (*Not) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*Not) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("not failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AllOf struct{}
|
||||
|
||||
func (*AllOf) KeywordPath() []string {
|
||||
return []string{"allOf"}
|
||||
}
|
||||
|
||||
func (*AllOf) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("allOf failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AnyOf struct{}
|
||||
|
||||
func (*AnyOf) KeywordPath() []string {
|
||||
return []string{"anyOf"}
|
||||
}
|
||||
|
||||
func (*AnyOf) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("anyOf failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type OneOf struct {
|
||||
// Subschemas gives indexes of Subschemas that have matched.
|
||||
// Value nil, means none of the subschemas matched.
|
||||
Subschemas []int
|
||||
}
|
||||
|
||||
func (*OneOf) KeywordPath() []string {
|
||||
return []string{"oneOf"}
|
||||
}
|
||||
|
||||
func (k *OneOf) LocalizedString(p *message.Printer) string {
|
||||
if len(k.Subschemas) == 0 {
|
||||
return p.Sprintf("oneOf failed, none matched")
|
||||
}
|
||||
return p.Sprintf("oneOf failed, subschemas %d, %d matched", k.Subschemas[0], k.Subschemas[1])
|
||||
}
|
||||
|
||||
//--
|
||||
|
||||
type FalseSchema struct{}
|
||||
|
||||
func (*FalseSchema) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*FalseSchema) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("false schema")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type RefCycle struct {
|
||||
URL string
|
||||
KeywordLocation1 string
|
||||
KeywordLocation2 string
|
||||
}
|
||||
|
||||
func (*RefCycle) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (k *RefCycle) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("both %s and %s resolve to %q causing reference cycle", k.KeywordLocation1, k.KeywordLocation2, k.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Type struct {
|
||||
Got string
|
||||
Want []string
|
||||
}
|
||||
|
||||
func (*Type) KeywordPath() []string {
|
||||
return []string{"type"}
|
||||
}
|
||||
|
||||
func (k *Type) LocalizedString(p *message.Printer) string {
|
||||
want := strings.Join(k.Want, " or ")
|
||||
return p.Sprintf("got %s, want %s", k.Got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Enum struct {
|
||||
Got any
|
||||
Want []any
|
||||
}
|
||||
|
||||
// KeywordPath implements jsonschema.ErrorKind.
|
||||
func (*Enum) KeywordPath() []string {
|
||||
return []string{"enum"}
|
||||
}
|
||||
|
||||
func (k *Enum) LocalizedString(p *message.Printer) string {
|
||||
allPrimitive := true
|
||||
loop:
|
||||
for _, item := range k.Want {
|
||||
switch item.(type) {
|
||||
case []any, map[string]any:
|
||||
allPrimitive = false
|
||||
break loop
|
||||
}
|
||||
}
|
||||
if allPrimitive {
|
||||
if len(k.Want) == 1 {
|
||||
return p.Sprintf("value must be %s", display(k.Want[0]))
|
||||
}
|
||||
var want []string
|
||||
for _, v := range k.Want {
|
||||
want = append(want, display(v))
|
||||
}
|
||||
return p.Sprintf("value must be one of %s", strings.Join(want, ", "))
|
||||
}
|
||||
return p.Sprintf("enum failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Const struct {
|
||||
Got any
|
||||
Want any
|
||||
}
|
||||
|
||||
func (*Const) KeywordPath() []string {
|
||||
return []string{"const"}
|
||||
}
|
||||
|
||||
func (k *Const) LocalizedString(p *message.Printer) string {
|
||||
switch want := k.Want.(type) {
|
||||
case []any, map[string]any:
|
||||
return p.Sprintf("const failed")
|
||||
default:
|
||||
return p.Sprintf("value must be %s", display(want))
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Format struct {
|
||||
Got any
|
||||
Want string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (*Format) KeywordPath() []string {
|
||||
return []string{"format"}
|
||||
}
|
||||
|
||||
func (k *Format) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("%s is not valid %s: %v", display(k.Got), k.Want, localizedError(k.Err, p))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Reference struct {
|
||||
Keyword string
|
||||
URL string
|
||||
}
|
||||
|
||||
func (k *Reference) KeywordPath() []string {
|
||||
return []string{k.Keyword}
|
||||
}
|
||||
|
||||
func (*Reference) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("validation failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinProperties struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinProperties) KeywordPath() []string {
|
||||
return []string{"minProperties"}
|
||||
}
|
||||
|
||||
func (k *MinProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minProperties: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxProperties struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxProperties) KeywordPath() []string {
|
||||
return []string{"maxProperties"}
|
||||
}
|
||||
|
||||
func (k *MaxProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxProperties: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinItems struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinItems) KeywordPath() []string {
|
||||
return []string{"minItems"}
|
||||
}
|
||||
|
||||
func (k *MinItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minItems: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxItems struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxItems) KeywordPath() []string {
|
||||
return []string{"maxItems"}
|
||||
}
|
||||
|
||||
func (k *MaxItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxItems: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AdditionalItems struct {
|
||||
Count int
|
||||
}
|
||||
|
||||
func (*AdditionalItems) KeywordPath() []string {
|
||||
return []string{"additionalItems"}
|
||||
}
|
||||
|
||||
func (k *AdditionalItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("last %d additionalItem(s) not allowed", k.Count)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Required struct {
|
||||
Missing []string
|
||||
}
|
||||
|
||||
func (*Required) KeywordPath() []string {
|
||||
return []string{"required"}
|
||||
}
|
||||
|
||||
func (k *Required) LocalizedString(p *message.Printer) string {
|
||||
if len(k.Missing) == 1 {
|
||||
return p.Sprintf("missing property %s", quote(k.Missing[0]))
|
||||
}
|
||||
return p.Sprintf("missing properties %s", joinQuoted(k.Missing, ", "))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Dependency struct {
|
||||
Prop string // dependency of prop that failed
|
||||
Missing []string // missing props
|
||||
}
|
||||
|
||||
func (k *Dependency) KeywordPath() []string {
|
||||
return []string{"dependency", k.Prop}
|
||||
}
|
||||
|
||||
func (k *Dependency) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("properties %s required, if %s exists", joinQuoted(k.Missing, ", "), quote(k.Prop))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type DependentRequired struct {
|
||||
Prop string // dependency of prop that failed
|
||||
Missing []string // missing props
|
||||
}
|
||||
|
||||
func (k *DependentRequired) KeywordPath() []string {
|
||||
return []string{"dependentRequired", k.Prop}
|
||||
}
|
||||
|
||||
func (k *DependentRequired) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("properties %s required, if %s exists", joinQuoted(k.Missing, ", "), quote(k.Prop))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AdditionalProperties struct {
|
||||
Properties []string
|
||||
}
|
||||
|
||||
func (*AdditionalProperties) KeywordPath() []string {
|
||||
return []string{"additionalProperties"}
|
||||
}
|
||||
|
||||
func (k *AdditionalProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("additional properties %s not allowed", joinQuoted(k.Properties, ", "))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type PropertyNames struct {
|
||||
Property string
|
||||
}
|
||||
|
||||
func (*PropertyNames) KeywordPath() []string {
|
||||
return []string{"propertyNames"}
|
||||
}
|
||||
|
||||
func (k *PropertyNames) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("invalid propertyName %s", quote(k.Property))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type UniqueItems struct {
|
||||
Duplicates [2]int
|
||||
}
|
||||
|
||||
func (*UniqueItems) KeywordPath() []string {
|
||||
return []string{"uniqueItems"}
|
||||
}
|
||||
|
||||
func (k *UniqueItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("items at %d and %d are equal", k.Duplicates[0], k.Duplicates[1])
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Contains struct{}
|
||||
|
||||
func (*Contains) KeywordPath() []string {
|
||||
return []string{"contains"}
|
||||
}
|
||||
|
||||
func (*Contains) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("no items match contains schema")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinContains struct {
|
||||
Got []int
|
||||
Want int
|
||||
}
|
||||
|
||||
func (*MinContains) KeywordPath() []string {
|
||||
return []string{"minContains"}
|
||||
}
|
||||
|
||||
func (k *MinContains) LocalizedString(p *message.Printer) string {
|
||||
if len(k.Got) == 0 {
|
||||
return p.Sprintf("min %d items required to match contains schema, but none matched", k.Want)
|
||||
} else {
|
||||
got := fmt.Sprintf("%v", k.Got)
|
||||
return p.Sprintf("min %d items required to match contains schema, but matched %d items at %v", k.Want, len(k.Got), got[1:len(got)-1])
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxContains struct {
|
||||
Got []int
|
||||
Want int
|
||||
}
|
||||
|
||||
func (*MaxContains) KeywordPath() []string {
|
||||
return []string{"maxContains"}
|
||||
}
|
||||
|
||||
func (k *MaxContains) LocalizedString(p *message.Printer) string {
|
||||
got := fmt.Sprintf("%v", k.Got)
|
||||
return p.Sprintf("max %d items required to match contains schema, but matched %d items at %v", k.Want, len(k.Got), got[1:len(got)-1])
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinLength struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinLength) KeywordPath() []string {
|
||||
return []string{"minLength"}
|
||||
}
|
||||
|
||||
func (k *MinLength) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minLength: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxLength struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxLength) KeywordPath() []string {
|
||||
return []string{"maxLength"}
|
||||
}
|
||||
|
||||
func (k *MaxLength) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxLength: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Pattern struct {
|
||||
Got string
|
||||
Want string
|
||||
}
|
||||
|
||||
func (*Pattern) KeywordPath() []string {
|
||||
return []string{"pattern"}
|
||||
}
|
||||
|
||||
func (k *Pattern) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("%s does not match pattern %s", quote(k.Got), quote(k.Want))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentEncoding struct {
|
||||
Want string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (*ContentEncoding) KeywordPath() []string {
|
||||
return []string{"contentEncoding"}
|
||||
}
|
||||
|
||||
func (k *ContentEncoding) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("value is not %s encoded: %v", quote(k.Want), localizedError(k.Err, p))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentMediaType struct {
|
||||
Got []byte
|
||||
Want string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (*ContentMediaType) KeywordPath() []string {
|
||||
return []string{"contentMediaType"}
|
||||
}
|
||||
|
||||
func (k *ContentMediaType) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("value if not of mediatype %s: %v", quote(k.Want), k.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentSchema struct{}
|
||||
|
||||
func (*ContentSchema) KeywordPath() []string {
|
||||
return []string{"contentSchema"}
|
||||
}
|
||||
|
||||
func (*ContentSchema) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("contentSchema failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Minimum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*Minimum) KeywordPath() []string {
|
||||
return []string{"minimum"}
|
||||
}
|
||||
|
||||
func (k *Minimum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("minimum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Maximum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*Maximum) KeywordPath() []string {
|
||||
return []string{"maximum"}
|
||||
}
|
||||
|
||||
func (k *Maximum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("maximum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ExclusiveMinimum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*ExclusiveMinimum) KeywordPath() []string {
|
||||
return []string{"exclusiveMinimum"}
|
||||
}
|
||||
|
||||
func (k *ExclusiveMinimum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("exclusiveMinimum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ExclusiveMaximum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*ExclusiveMaximum) KeywordPath() []string {
|
||||
return []string{"exclusiveMaximum"}
|
||||
}
|
||||
|
||||
func (k *ExclusiveMaximum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("exclusiveMaximum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MultipleOf struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*MultipleOf) KeywordPath() []string {
|
||||
return []string{"multipleOf"}
|
||||
}
|
||||
|
||||
func (k *MultipleOf) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("multipleOf: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --

// quote renders s single-quoted: it escapes via %q, then swaps the
// double-quote convention for single quotes.
func quote(s string) string {
	q := fmt.Sprintf("%q", s)
	q = strings.ReplaceAll(q, `\"`, `"`)
	q = strings.ReplaceAll(q, `'`, `\'`)
	return "'" + q[1:len(q)-1] + "'"
}

// joinQuoted joins the quoted form of each element of arr with sep.
func joinQuoted(arr []string, sep string) string {
	var b strings.Builder
	for _, s := range arr {
		if b.Len() > 0 {
			b.WriteString(sep)
		}
		b.WriteString(quote(s))
	}
	return b.String()
}

// display renders a primitive value for an error message; composite
// values are summarized as "value". To be used only for primitive.
func display(v any) string {
	switch v := v.(type) {
	case string:
		return quote(v)
	case []any, map[string]any:
		return "value"
	default:
		return fmt.Sprintf("%v", v)
	}
}
|
||||
|
||||
func localizedError(err error, p *message.Printer) string {
|
||||
if err, ok := err.(interface{ LocalizedError(*message.Printer) string }); ok {
|
||||
return err.LocalizedError(p)
|
||||
}
|
||||
return err.Error()
|
||||
}
|
||||
266
vendor/github.com/santhosh-tekuri/jsonschema/v6/loader.go
generated
vendored
Normal file
266
vendor/github.com/santhosh-tekuri/jsonschema/v6/loader.go
generated
vendored
Normal file
|
|
@ -0,0 +1,266 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
gourl "net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// URLLoader knows how to load json from given url.
type URLLoader interface {
	// Load loads json from given absolute url.
	Load(url string) (any, error)
}
|
||||
|
||||
// --
|
||||
|
||||
// FileLoader loads json file url.
|
||||
type FileLoader struct{}
|
||||
|
||||
func (l FileLoader) Load(url string) (any, error) {
|
||||
path, err := l.ToFile(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
return UnmarshalJSON(f)
|
||||
}
|
||||
|
||||
// ToFile is helper method to convert file url to file path.
|
||||
func (l FileLoader) ToFile(url string) (string, error) {
|
||||
u, err := gourl.Parse(url)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if u.Scheme != "file" {
|
||||
return "", fmt.Errorf("invalid file url: %s", u)
|
||||
}
|
||||
path := u.Path
|
||||
if runtime.GOOS == "windows" {
|
||||
path = strings.TrimPrefix(path, "/")
|
||||
path = filepath.FromSlash(path)
|
||||
}
|
||||
return path, nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemeURLLoader delegates to other [URLLoaders]
|
||||
// based on url scheme.
|
||||
type SchemeURLLoader map[string]URLLoader
|
||||
|
||||
func (l SchemeURLLoader) Load(url string) (any, error) {
|
||||
u, err := gourl.Parse(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ll, ok := l[u.Scheme]
|
||||
if !ok {
|
||||
return nil, &UnsupportedURLSchemeError{u.String()}
|
||||
}
|
||||
return ll.Load(url)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
//go:embed metaschemas
|
||||
var metaFS embed.FS
|
||||
|
||||
func openMeta(url string) (fs.File, error) {
|
||||
u, meta := strings.CutPrefix(url, "http://json-schema.org/")
|
||||
if !meta {
|
||||
u, meta = strings.CutPrefix(url, "https://json-schema.org/")
|
||||
}
|
||||
if meta {
|
||||
if u == "schema" {
|
||||
return openMeta(draftLatest.url)
|
||||
}
|
||||
f, err := metaFS.Open("metaschemas/" + u)
|
||||
if err != nil {
|
||||
if errors.Is(err, fs.ErrNotExist) {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
return f, err
|
||||
}
|
||||
return nil, nil
|
||||
|
||||
}
|
||||
|
||||
func isMeta(url string) bool {
|
||||
f, err := openMeta(url)
|
||||
if err != nil {
|
||||
return true
|
||||
}
|
||||
if f != nil {
|
||||
f.Close()
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func loadMeta(url string) (any, error) {
|
||||
f, err := openMeta(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if f == nil {
|
||||
return nil, nil
|
||||
}
|
||||
defer f.Close()
|
||||
return UnmarshalJSON(f)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type defaultLoader struct {
|
||||
docs map[url]any // docs loaded so far
|
||||
loader URLLoader
|
||||
}
|
||||
|
||||
func (l *defaultLoader) add(url url, doc any) bool {
|
||||
if _, ok := l.docs[url]; ok {
|
||||
return false
|
||||
}
|
||||
l.docs[url] = doc
|
||||
return true
|
||||
}
|
||||
|
||||
func (l *defaultLoader) load(url url) (any, error) {
|
||||
if doc, ok := l.docs[url]; ok {
|
||||
return doc, nil
|
||||
}
|
||||
doc, err := loadMeta(url.String())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if doc != nil {
|
||||
l.add(url, doc)
|
||||
return doc, nil
|
||||
}
|
||||
if l.loader == nil {
|
||||
return nil, &LoadURLError{url.String(), errors.New("no URLLoader set")}
|
||||
}
|
||||
doc, err = l.loader.Load(url.String())
|
||||
if err != nil {
|
||||
return nil, &LoadURLError{URL: url.String(), Err: err}
|
||||
}
|
||||
l.add(url, doc)
|
||||
return doc, nil
|
||||
}
|
||||
|
||||
func (l *defaultLoader) getDraft(up urlPtr, doc any, defaultDraft *Draft, cycle map[url]struct{}) (*Draft, error) {
|
||||
obj, ok := doc.(map[string]any)
|
||||
if !ok {
|
||||
return defaultDraft, nil
|
||||
}
|
||||
sch, ok := strVal(obj, "$schema")
|
||||
if !ok {
|
||||
return defaultDraft, nil
|
||||
}
|
||||
if draft := draftFromURL(sch); draft != nil {
|
||||
return draft, nil
|
||||
}
|
||||
sch, _ = split(sch)
|
||||
if _, err := gourl.Parse(sch); err != nil {
|
||||
return nil, &InvalidMetaSchemaURLError{up.String(), err}
|
||||
}
|
||||
schUrl := url(sch)
|
||||
if up.ptr.isEmpty() && schUrl == up.url {
|
||||
return nil, &UnsupportedDraftError{schUrl.String()}
|
||||
}
|
||||
if _, ok := cycle[schUrl]; ok {
|
||||
return nil, &MetaSchemaCycleError{schUrl.String()}
|
||||
}
|
||||
cycle[schUrl] = struct{}{}
|
||||
doc, err := l.load(schUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return l.getDraft(urlPtr{schUrl, ""}, doc, defaultDraft, cycle)
|
||||
}
|
||||
|
||||
func (l *defaultLoader) getMetaVocabs(doc any, draft *Draft, vocabularies map[string]*Vocabulary) ([]string, error) {
|
||||
obj, ok := doc.(map[string]any)
|
||||
if !ok {
|
||||
return nil, nil
|
||||
}
|
||||
sch, ok := strVal(obj, "$schema")
|
||||
if !ok {
|
||||
return nil, nil
|
||||
}
|
||||
if draft := draftFromURL(sch); draft != nil {
|
||||
return nil, nil
|
||||
}
|
||||
sch, _ = split(sch)
|
||||
if _, err := gourl.Parse(sch); err != nil {
|
||||
return nil, &ParseURLError{sch, err}
|
||||
}
|
||||
schUrl := url(sch)
|
||||
doc, err := l.load(schUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return draft.getVocabs(schUrl, doc, vocabularies)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type LoadURLError struct {
|
||||
URL string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e *LoadURLError) Error() string {
|
||||
return fmt.Sprintf("failing loading %q: %v", e.URL, e.Err)
|
||||
}
|
||||
|
||||
// --

// UnsupportedURLSchemeError is returned when no URLLoader is registered
// for the url's scheme.
type UnsupportedURLSchemeError struct {
	url string
}

func (e *UnsupportedURLSchemeError) Error() string {
	return fmt.Sprintf("no URLLoader registered for %q", e.url)
}
|
||||
|
||||
// --

// ResourceExistsError is returned when a resource is registered for a
// url that already has one.
type ResourceExistsError struct {
	url string
}

func (e *ResourceExistsError) Error() string {
	return fmt.Sprintf("resource for %q already exists", e.url)
}
|
||||
|
||||
// --
|
||||
|
||||
// UnmarshalJSON unmarshals into [any] without losing
|
||||
// number precision using [json.Number].
|
||||
func UnmarshalJSON(r io.Reader) (any, error) {
|
||||
decoder := json.NewDecoder(r)
|
||||
decoder.UseNumber()
|
||||
var doc any
|
||||
if err := decoder.Decode(&doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err := decoder.Token(); err == nil || err != io.EOF {
|
||||
return nil, fmt.Errorf("invalid character after top-level value")
|
||||
}
|
||||
return doc, nil
|
||||
}
|
||||
151
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-04/schema
generated
vendored
Normal file
151
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-04/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"description": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"positiveInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"positiveIntegerDefault0": {
|
||||
"allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uriref"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": {},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"exclusiveMinimum": true
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": {
|
||||
"anyOf": [
|
||||
{ "type": "boolean" },
|
||||
{ "$ref": "#" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxProperties": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "type": "boolean" },
|
||||
{ "$ref": "#" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" },
|
||||
"format": { "type": "string" },
|
||||
"$ref": { "type": "string" }
|
||||
},
|
||||
"dependencies": {
|
||||
"exclusiveMaximum": [ "maximum" ],
|
||||
"exclusiveMinimum": [ "minimum" ]
|
||||
},
|
||||
"default": {}
|
||||
}
|
||||
150
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-06/schema
generated
vendored
Normal file
150
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-06/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-06/schema#",
|
||||
"$id": "http://json-schema.org/draft-06/schema#",
|
||||
"title": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"allOf": [
|
||||
{ "$ref": "#/definitions/nonNegativeInteger" },
|
||||
{ "default": 0 }
|
||||
]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
},
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": {},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": { "$ref": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"contains": { "$ref": "#" },
|
||||
"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$ref": "#" },
|
||||
"const": {},
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"format": { "type": "string" },
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" }
|
||||
},
|
||||
"default": {}
|
||||
}
|
||||
172
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-07/schema
generated
vendored
Normal file
172
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-07/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"allOf": [
|
||||
{ "$ref": "#/definitions/nonNegativeInteger" },
|
||||
{ "default": 0 }
|
||||
]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
},
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": { "$ref": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": true
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"contains": { "$ref": "#" },
|
||||
"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$ref": "#" },
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true,
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"format": { "type": "string" },
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentEncoding": { "type": "string" },
|
||||
"if": { "$ref": "#" },
|
||||
"then": { "$ref": "#" },
|
||||
"else": { "$ref": "#" },
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" }
|
||||
},
|
||||
"default": true
|
||||
}
|
||||
55
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/applicator
generated
vendored
Normal file
55
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/applicator
generated
vendored
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/applicator",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/applicator": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"additionalItems": { "$recursiveRef": "#" },
|
||||
"unevaluatedItems": { "$recursiveRef": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$recursiveRef": "#" },
|
||||
{ "$ref": "#/$defs/schemaArray" }
|
||||
]
|
||||
},
|
||||
"contains": { "$recursiveRef": "#" },
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"unevaluatedProperties": { "$recursiveRef": "#" },
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependentSchemas": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$recursiveRef": "#"
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$recursiveRef": "#" },
|
||||
"if": { "$recursiveRef": "#" },
|
||||
"then": { "$recursiveRef": "#" },
|
||||
"else": { "$recursiveRef": "#" },
|
||||
"allOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"anyOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"oneOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"not": { "$recursiveRef": "#" }
|
||||
},
|
||||
"$defs": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$recursiveRef": "#" }
|
||||
}
|
||||
}
|
||||
}
|
||||
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/content
generated
vendored
Normal file
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/content
generated
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/content",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/content": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Content vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentEncoding": { "type": "string" },
|
||||
"contentSchema": { "$recursiveRef": "#" }
|
||||
}
|
||||
}
|
||||
56
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/core
generated
vendored
Normal file
56
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/core
generated
vendored
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/core",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/core": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Core vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference",
|
||||
"$comment": "Non-empty fragments not allowed.",
|
||||
"pattern": "^[^#]*#?$"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$anchor": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$recursiveRef": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$recursiveAnchor": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"$vocabulary": {
|
||||
"type": "object",
|
||||
"propertyNames": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"additionalProperties": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"$defs": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/format
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/format
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/format",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/format": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Format vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/meta-data
generated
vendored
Normal file
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/meta-data
generated
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/meta-data",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/meta-data": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Meta-data vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"deprecated": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
}
|
||||
}
|
||||
}
|
||||
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/validation
generated
vendored
Normal file
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/validation
generated
vendored
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/validation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/validation": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Validation vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minContains": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 1
|
||||
},
|
||||
"maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/$defs/stringArray" },
|
||||
"dependentRequired": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
}
|
||||
},
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/$defs/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 0
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
}
|
||||
}
|
||||
41
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/schema
generated
vendored
Normal file
41
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/core": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/applicator": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/validation": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/meta-data": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/format": false,
|
||||
"https://json-schema.org/draft/2019-09/vocab/content": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Core and Validation specifications meta-schema",
|
||||
"allOf": [
|
||||
{"$ref": "meta/core"},
|
||||
{"$ref": "meta/applicator"},
|
||||
{"$ref": "meta/validation"},
|
||||
{"$ref": "meta/meta-data"},
|
||||
{"$ref": "meta/format"},
|
||||
{"$ref": "meta/content"}
|
||||
],
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"definitions": {
|
||||
"$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.",
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$recursiveRef": "#" },
|
||||
{ "$ref": "meta/validation#/$defs/stringArray" }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
47
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/applicator
generated
vendored
Normal file
47
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/applicator
generated
vendored
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/applicator",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/applicator": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"prefixItems": { "$ref": "#/$defs/schemaArray" },
|
||||
"items": { "$dynamicRef": "#meta" },
|
||||
"contains": { "$dynamicRef": "#meta" },
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependentSchemas": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"default": {}
|
||||
},
|
||||
"propertyNames": { "$dynamicRef": "#meta" },
|
||||
"if": { "$dynamicRef": "#meta" },
|
||||
"then": { "$dynamicRef": "#meta" },
|
||||
"else": { "$dynamicRef": "#meta" },
|
||||
"allOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"anyOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"oneOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"not": { "$dynamicRef": "#meta" }
|
||||
},
|
||||
"$defs": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
}
|
||||
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/content
generated
vendored
Normal file
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/content
generated
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/content",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/content": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Content vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"contentEncoding": { "type": "string" },
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentSchema": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
50
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/core
generated
vendored
Normal file
50
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/core
generated
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/core",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/core": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Core vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"$ref": "#/$defs/uriReferenceString",
|
||||
"$comment": "Non-empty fragments not allowed.",
|
||||
"pattern": "^[^#]*#?$"
|
||||
},
|
||||
"$schema": { "$ref": "#/$defs/uriString" },
|
||||
"$ref": { "$ref": "#/$defs/uriReferenceString" },
|
||||
"$anchor": { "$ref": "#/$defs/anchorString" },
|
||||
"$dynamicRef": { "$ref": "#/$defs/uriReferenceString" },
|
||||
"$dynamicAnchor": { "$ref": "#/$defs/anchorString" },
|
||||
"$vocabulary": {
|
||||
"type": "object",
|
||||
"propertyNames": { "$ref": "#/$defs/uriString" },
|
||||
"additionalProperties": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"$defs": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"anchorString": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"
|
||||
},
|
||||
"uriString": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"uriReferenceString": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
}
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-annotation
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-annotation
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/format-annotation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-annotation": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Format vocabulary meta-schema for annotation results",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-assertion
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-assertion
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/format-assertion",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-assertion": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Format vocabulary meta-schema for assertion results",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/meta-data
generated
vendored
Normal file
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/meta-data
generated
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/meta-data",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/meta-data": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Meta-data vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"deprecated": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
}
|
||||
}
|
||||
}
|
||||
14
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/unevaluated
generated
vendored
Normal file
14
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/unevaluated
generated
vendored
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/unevaluated",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/unevaluated": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Unevaluated applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"unevaluatedItems": { "$dynamicRef": "#meta" },
|
||||
"unevaluatedProperties": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/validation
generated
vendored
Normal file
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/validation
generated
vendored
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/validation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/validation": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Validation vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/$defs/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minContains": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 1
|
||||
},
|
||||
"maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/$defs/stringArray" },
|
||||
"dependentRequired": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
}
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 0
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
}
|
||||
}
|
||||
57
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/schema
generated
vendored
Normal file
57
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/core": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/applicator": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/validation": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/meta-data": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/content": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Core and Validation specifications meta-schema",
|
||||
"allOf": [
|
||||
{"$ref": "meta/core"},
|
||||
{"$ref": "meta/applicator"},
|
||||
{"$ref": "meta/unevaluated"},
|
||||
{"$ref": "meta/validation"},
|
||||
{"$ref": "meta/meta-data"},
|
||||
{"$ref": "meta/format-annotation"},
|
||||
{"$ref": "meta/content"}
|
||||
],
|
||||
"type": ["object", "boolean"],
|
||||
"$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.",
|
||||
"properties": {
|
||||
"definitions": {
|
||||
"$comment": "\"definitions\" has been replaced by \"$defs\".",
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"deprecated": true,
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$dynamicRef": "#meta" },
|
||||
{ "$ref": "meta/validation#/$defs/stringArray" }
|
||||
]
|
||||
},
|
||||
"deprecated": true,
|
||||
"default": {}
|
||||
},
|
||||
"$recursiveAnchor": {
|
||||
"$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".",
|
||||
"$ref": "meta/core#/$defs/anchorString",
|
||||
"deprecated": true
|
||||
},
|
||||
"$recursiveRef": {
|
||||
"$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".",
|
||||
"$ref": "meta/core#/$defs/uriReferenceString",
|
||||
"deprecated": true
|
||||
}
|
||||
}
|
||||
}
|
||||
549
vendor/github.com/santhosh-tekuri/jsonschema/v6/objcompiler.go
generated
vendored
Normal file
549
vendor/github.com/santhosh-tekuri/jsonschema/v6/objcompiler.go
generated
vendored
Normal file
|
|
@ -0,0 +1,549 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type objCompiler struct {
|
||||
c *Compiler
|
||||
obj map[string]any
|
||||
up urlPtr
|
||||
r *root
|
||||
res *resource
|
||||
q *queue
|
||||
}
|
||||
|
||||
func (c *objCompiler) compile(s *Schema) error {
|
||||
// id --
|
||||
if id := c.res.dialect.draft.getID(c.obj); id != "" {
|
||||
s.ID = id
|
||||
}
|
||||
|
||||
// anchor --
|
||||
if s.DraftVersion < 2019 {
|
||||
// anchor is specified in id
|
||||
id := c.string(c.res.dialect.draft.id)
|
||||
if id != "" {
|
||||
_, f := split(id)
|
||||
if f != "" {
|
||||
var err error
|
||||
s.Anchor, err = decode(f)
|
||||
if err != nil {
|
||||
return &ParseAnchorError{URL: s.Location}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
s.Anchor = c.string("$anchor")
|
||||
}
|
||||
|
||||
if err := c.compileDraft4(s); err != nil {
|
||||
return err
|
||||
}
|
||||
if s.DraftVersion >= 6 {
|
||||
if err := c.compileDraft6(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if s.DraftVersion >= 7 {
|
||||
if err := c.compileDraft7(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if s.DraftVersion >= 2019 {
|
||||
if err := c.compileDraft2019(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if s.DraftVersion >= 2020 {
|
||||
if err := c.compileDraft2020(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// vocabularies
|
||||
vocabs := c.res.dialect.activeVocabs(c.c.roots.assertVocabs, c.c.roots.vocabularies)
|
||||
for _, vocab := range vocabs {
|
||||
v := c.c.roots.vocabularies[vocab]
|
||||
if v == nil {
|
||||
continue
|
||||
}
|
||||
ext, err := v.Compile(&CompilerContext{c}, c.obj)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ext != nil {
|
||||
s.Extensions = append(s.Extensions, ext)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft4(s *Schema) error {
|
||||
var err error
|
||||
|
||||
if c.hasVocab("core") {
|
||||
if s.Ref, err = c.enqueueRef("$ref"); err != nil {
|
||||
return err
|
||||
}
|
||||
if s.DraftVersion < 2019 && s.Ref != nil {
|
||||
// All other properties in a "$ref" object MUST be ignored
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if c.hasVocab("applicator") {
|
||||
s.AllOf = c.enqueueArr("allOf")
|
||||
s.AnyOf = c.enqueueArr("anyOf")
|
||||
s.OneOf = c.enqueueArr("oneOf")
|
||||
s.Not = c.enqueueProp("not")
|
||||
|
||||
if s.DraftVersion < 2020 {
|
||||
if items, ok := c.obj["items"]; ok {
|
||||
if _, ok := items.([]any); ok {
|
||||
s.Items = c.enqueueArr("items")
|
||||
s.AdditionalItems = c.enqueueAdditional("additionalItems")
|
||||
} else {
|
||||
s.Items = c.enqueueProp("items")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
s.Properties = c.enqueueMap("properties")
|
||||
if m := c.enqueueMap("patternProperties"); m != nil {
|
||||
s.PatternProperties = map[Regexp]*Schema{}
|
||||
for pname, sch := range m {
|
||||
re, err := c.c.roots.regexpEngine(pname)
|
||||
if err != nil {
|
||||
return &InvalidRegexError{c.up.format("patternProperties"), pname, err}
|
||||
}
|
||||
s.PatternProperties[re] = sch
|
||||
}
|
||||
}
|
||||
s.AdditionalProperties = c.enqueueAdditional("additionalProperties")
|
||||
|
||||
if m := c.objVal("dependencies"); m != nil {
|
||||
s.Dependencies = map[string]any{}
|
||||
for pname, pvalue := range m {
|
||||
if arr, ok := pvalue.([]any); ok {
|
||||
s.Dependencies[pname] = toStrings(arr)
|
||||
} else {
|
||||
ptr := c.up.ptr.append2("dependencies", pname)
|
||||
s.Dependencies[pname] = c.enqueuePtr(ptr)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.hasVocab("validation") {
|
||||
if t, ok := c.obj["type"]; ok {
|
||||
s.Types = newTypes(t)
|
||||
}
|
||||
if arr := c.arrVal("enum"); arr != nil {
|
||||
s.Enum = newEnum(arr)
|
||||
}
|
||||
s.MultipleOf = c.numVal("multipleOf")
|
||||
s.Maximum = c.numVal("maximum")
|
||||
if c.boolean("exclusiveMaximum") {
|
||||
s.ExclusiveMaximum = s.Maximum
|
||||
s.Maximum = nil
|
||||
} else {
|
||||
s.ExclusiveMaximum = c.numVal("exclusiveMaximum")
|
||||
}
|
||||
s.Minimum = c.numVal("minimum")
|
||||
if c.boolean("exclusiveMinimum") {
|
||||
s.ExclusiveMinimum = s.Minimum
|
||||
s.Minimum = nil
|
||||
} else {
|
||||
s.ExclusiveMinimum = c.numVal("exclusiveMinimum")
|
||||
}
|
||||
|
||||
s.MinLength = c.intVal("minLength")
|
||||
s.MaxLength = c.intVal("maxLength")
|
||||
if pat := c.strVal("pattern"); pat != nil {
|
||||
s.Pattern, err = c.c.roots.regexpEngine(*pat)
|
||||
if err != nil {
|
||||
return &InvalidRegexError{c.up.format("pattern"), *pat, err}
|
||||
}
|
||||
}
|
||||
|
||||
s.MinItems = c.intVal("minItems")
|
||||
s.MaxItems = c.intVal("maxItems")
|
||||
s.UniqueItems = c.boolean("uniqueItems")
|
||||
|
||||
s.MaxProperties = c.intVal("maxProperties")
|
||||
s.MinProperties = c.intVal("minProperties")
|
||||
if arr := c.arrVal("required"); arr != nil {
|
||||
s.Required = toStrings(arr)
|
||||
}
|
||||
}
|
||||
|
||||
// format --
|
||||
if c.assertFormat(s.DraftVersion) {
|
||||
if f := c.strVal("format"); f != nil {
|
||||
if *f == "regex" {
|
||||
s.Format = &Format{
|
||||
Name: "regex",
|
||||
Validate: c.c.roots.regexpEngine.validate,
|
||||
}
|
||||
} else {
|
||||
s.Format = c.c.formats[*f]
|
||||
if s.Format == nil {
|
||||
s.Format = formats[*f]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// annotations --
|
||||
s.Title = c.string("title")
|
||||
s.Description = c.string("description")
|
||||
if v, ok := c.obj["default"]; ok {
|
||||
s.Default = &v
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft6(s *Schema) error {
|
||||
if c.hasVocab("applicator") {
|
||||
s.Contains = c.enqueueProp("contains")
|
||||
s.PropertyNames = c.enqueueProp("propertyNames")
|
||||
}
|
||||
if c.hasVocab("validation") {
|
||||
if v, ok := c.obj["const"]; ok {
|
||||
s.Const = &v
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft7(s *Schema) error {
|
||||
if c.hasVocab("applicator") {
|
||||
s.If = c.enqueueProp("if")
|
||||
if s.If != nil {
|
||||
b := c.boolVal("if")
|
||||
if b == nil || *b {
|
||||
s.Then = c.enqueueProp("then")
|
||||
}
|
||||
if b == nil || !*b {
|
||||
s.Else = c.enqueueProp("else")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.c.assertContent {
|
||||
if ce := c.strVal("contentEncoding"); ce != nil {
|
||||
s.ContentEncoding = c.c.decoders[*ce]
|
||||
if s.ContentEncoding == nil {
|
||||
s.ContentEncoding = decoders[*ce]
|
||||
}
|
||||
}
|
||||
if cm := c.strVal("contentMediaType"); cm != nil {
|
||||
s.ContentMediaType = c.c.mediaTypes[*cm]
|
||||
if s.ContentMediaType == nil {
|
||||
s.ContentMediaType = mediaTypes[*cm]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// annotations --
|
||||
s.Comment = c.string("$comment")
|
||||
s.ReadOnly = c.boolean("readOnly")
|
||||
s.WriteOnly = c.boolean("writeOnly")
|
||||
if arr, ok := c.obj["examples"].([]any); ok {
|
||||
s.Examples = arr
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft2019(s *Schema) error {
|
||||
var err error
|
||||
|
||||
if c.hasVocab("core") {
|
||||
if s.RecursiveRef, err = c.enqueueRef("$recursiveRef"); err != nil {
|
||||
return err
|
||||
}
|
||||
s.RecursiveAnchor = c.boolean("$recursiveAnchor")
|
||||
}
|
||||
|
||||
if c.hasVocab("validation") {
|
||||
if s.Contains != nil {
|
||||
s.MinContains = c.intVal("minContains")
|
||||
s.MaxContains = c.intVal("maxContains")
|
||||
}
|
||||
if m := c.objVal("dependentRequired"); m != nil {
|
||||
s.DependentRequired = map[string][]string{}
|
||||
for pname, pvalue := range m {
|
||||
if arr, ok := pvalue.([]any); ok {
|
||||
s.DependentRequired[pname] = toStrings(arr)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.hasVocab("applicator") {
|
||||
s.DependentSchemas = c.enqueueMap("dependentSchemas")
|
||||
}
|
||||
|
||||
var unevaluated bool
|
||||
if s.DraftVersion == 2019 {
|
||||
unevaluated = c.hasVocab("applicator")
|
||||
} else {
|
||||
unevaluated = c.hasVocab("unevaluated")
|
||||
}
|
||||
if unevaluated {
|
||||
s.UnevaluatedItems = c.enqueueProp("unevaluatedItems")
|
||||
s.UnevaluatedProperties = c.enqueueProp("unevaluatedProperties")
|
||||
}
|
||||
|
||||
if c.c.assertContent {
|
||||
if s.ContentMediaType != nil && s.ContentMediaType.UnmarshalJSON != nil {
|
||||
s.ContentSchema = c.enqueueProp("contentSchema")
|
||||
}
|
||||
}
|
||||
|
||||
// annotations --
|
||||
s.Deprecated = c.boolean("deprecated")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft2020(s *Schema) error {
|
||||
if c.hasVocab("core") {
|
||||
sch, err := c.enqueueRef("$dynamicRef")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if sch != nil {
|
||||
dref := c.strVal("$dynamicRef")
|
||||
_, frag, err := splitFragment(*dref)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var anch string
|
||||
if anchor, ok := frag.convert().(anchor); ok {
|
||||
anch = string(anchor)
|
||||
}
|
||||
s.DynamicRef = &DynamicRef{sch, anch}
|
||||
}
|
||||
s.DynamicAnchor = c.string("$dynamicAnchor")
|
||||
}
|
||||
|
||||
if c.hasVocab("applicator") {
|
||||
s.PrefixItems = c.enqueueArr("prefixItems")
|
||||
s.Items2020 = c.enqueueProp("items")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// enqueue helpers --
|
||||
|
||||
func (c *objCompiler) enqueuePtr(ptr jsonPointer) *Schema {
|
||||
up := urlPtr{c.up.url, ptr}
|
||||
return c.c.enqueue(c.q, up)
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueRef(pname string) (*Schema, error) {
|
||||
ref := c.strVal(pname)
|
||||
if ref == nil {
|
||||
return nil, nil
|
||||
}
|
||||
baseURL := c.res.id
|
||||
// baseURL := c.r.baseURL(c.up.ptr)
|
||||
uf, err := baseURL.join(*ref)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
up, err := c.r.resolve(*uf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if up != nil {
|
||||
// local ref
|
||||
return c.enqueuePtr(up.ptr), nil
|
||||
}
|
||||
|
||||
// remote ref
|
||||
up_, err := c.c.roots.resolveFragment(*uf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.c.enqueue(c.q, up_), nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueProp(pname string) *Schema {
|
||||
if _, ok := c.obj[pname]; !ok {
|
||||
return nil
|
||||
}
|
||||
ptr := c.up.ptr.append(pname)
|
||||
return c.enqueuePtr(ptr)
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueArr(pname string) []*Schema {
|
||||
arr := c.arrVal(pname)
|
||||
if arr == nil {
|
||||
return nil
|
||||
}
|
||||
sch := make([]*Schema, len(arr))
|
||||
for i := range arr {
|
||||
ptr := c.up.ptr.append2(pname, strconv.Itoa(i))
|
||||
sch[i] = c.enqueuePtr(ptr)
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueMap(pname string) map[string]*Schema {
|
||||
obj := c.objVal(pname)
|
||||
if obj == nil {
|
||||
return nil
|
||||
}
|
||||
sch := make(map[string]*Schema)
|
||||
for k := range obj {
|
||||
ptr := c.up.ptr.append2(pname, k)
|
||||
sch[k] = c.enqueuePtr(ptr)
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueAdditional(pname string) any {
|
||||
if b := c.boolVal(pname); b != nil {
|
||||
return *b
|
||||
}
|
||||
if sch := c.enqueueProp(pname); sch != nil {
|
||||
return sch
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
func (c *objCompiler) hasVocab(name string) bool {
|
||||
return c.res.dialect.hasVocab(name)
|
||||
}
|
||||
|
||||
func (c *objCompiler) assertFormat(draftVersion int) bool {
|
||||
if c.c.assertFormat || draftVersion < 2019 {
|
||||
return true
|
||||
}
|
||||
if draftVersion == 2019 {
|
||||
return c.hasVocab("format")
|
||||
} else {
|
||||
return c.hasVocab("format-assertion")
|
||||
}
|
||||
}
|
||||
|
||||
// value helpers --
|
||||
|
||||
func (c *objCompiler) boolVal(pname string) *bool {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
b, ok := v.(bool)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return &b
|
||||
}
|
||||
|
||||
func (c *objCompiler) boolean(pname string) bool {
|
||||
b := c.boolVal(pname)
|
||||
return b != nil && *b
|
||||
}
|
||||
|
||||
func (c *objCompiler) strVal(pname string) *string {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return &s
|
||||
}
|
||||
|
||||
func (c *objCompiler) string(pname string) string {
|
||||
if s := c.strVal(pname); s != nil {
|
||||
return *s
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (c *objCompiler) numVal(pname string) *big.Rat {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
switch v.(type) {
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
if n, ok := new(big.Rat).SetString(fmt.Sprint(v)); ok {
|
||||
return n
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) intVal(pname string) *int {
|
||||
if n := c.numVal(pname); n != nil && n.IsInt() {
|
||||
n := int(n.Num().Int64())
|
||||
return &n
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) objVal(pname string) map[string]any {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return obj
|
||||
}
|
||||
|
||||
func (c *objCompiler) arrVal(pname string) []any {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type InvalidRegexError struct {
|
||||
URL string
|
||||
Regex string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e *InvalidRegexError) Error() string {
|
||||
return fmt.Sprintf("invalid regex %q at %q: %v", e.Regex, e.URL, e.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
func toStrings(arr []any) []string {
|
||||
var strings []string
|
||||
for _, item := range arr {
|
||||
if s, ok := item.(string); ok {
|
||||
strings = append(strings, s)
|
||||
}
|
||||
}
|
||||
return strings
|
||||
}
|
||||
212
vendor/github.com/santhosh-tekuri/jsonschema/v6/output.go
generated
vendored
Normal file
212
vendor/github.com/santhosh-tekuri/jsonschema/v6/output.go
generated
vendored
Normal file
|
|
@ -0,0 +1,212 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/language"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
var defaultPrinter = message.NewPrinter(language.English)
|
||||
|
||||
// format ---
|
||||
|
||||
func (e *ValidationError) schemaURL() string {
|
||||
if ref, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
return ref.URL
|
||||
} else {
|
||||
return e.SchemaURL
|
||||
}
|
||||
}
|
||||
|
||||
func (e *ValidationError) absoluteKeywordLocation() string {
|
||||
var schemaURL string
|
||||
var keywordPath []string
|
||||
if ref, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
schemaURL = ref.URL
|
||||
keywordPath = nil
|
||||
} else {
|
||||
schemaURL = e.SchemaURL
|
||||
keywordPath = e.ErrorKind.KeywordPath()
|
||||
}
|
||||
return fmt.Sprintf("%s%s", schemaURL, encode(jsonPtr(keywordPath)))
|
||||
}
|
||||
|
||||
func (e *ValidationError) skip() bool {
|
||||
if len(e.Causes) == 1 {
|
||||
_, ok := e.ErrorKind.(*kind.Reference)
|
||||
return ok
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e *ValidationError) display(sb *strings.Builder, verbose bool, indent int, absKwLoc string, p *message.Printer) {
|
||||
if !e.skip() {
|
||||
if indent > 0 {
|
||||
sb.WriteByte('\n')
|
||||
for i := 0; i < indent-1; i++ {
|
||||
sb.WriteString(" ")
|
||||
}
|
||||
sb.WriteString("- ")
|
||||
}
|
||||
indent = indent + 1
|
||||
|
||||
prevAbsKwLoc := absKwLoc
|
||||
absKwLoc = e.absoluteKeywordLocation()
|
||||
|
||||
if _, ok := e.ErrorKind.(*kind.Schema); ok {
|
||||
sb.WriteString(e.ErrorKind.LocalizedString(p))
|
||||
} else {
|
||||
sb.WriteString(p.Sprintf("at %s", quote(jsonPtr(e.InstanceLocation))))
|
||||
if verbose {
|
||||
schLoc := absKwLoc
|
||||
if prevAbsKwLoc != "" {
|
||||
pu, _ := split(prevAbsKwLoc)
|
||||
u, f := split(absKwLoc)
|
||||
if u == pu {
|
||||
schLoc = fmt.Sprintf("S#%s", f)
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(sb, " [%s]", schLoc)
|
||||
}
|
||||
fmt.Fprintf(sb, ": %s", e.ErrorKind.LocalizedString(p))
|
||||
}
|
||||
}
|
||||
for _, cause := range e.Causes {
|
||||
cause.display(sb, verbose, indent, absKwLoc, p)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *ValidationError) Error() string {
|
||||
return e.LocalizedError(defaultPrinter)
|
||||
}
|
||||
|
||||
func (e *ValidationError) LocalizedError(p *message.Printer) string {
|
||||
var sb strings.Builder
|
||||
e.display(&sb, false, 0, "", p)
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (e *ValidationError) GoString() string {
|
||||
return e.LocalizedGoString(defaultPrinter)
|
||||
}
|
||||
|
||||
func (e *ValidationError) LocalizedGoString(p *message.Printer) string {
|
||||
var sb strings.Builder
|
||||
e.display(&sb, true, 0, "", p)
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func jsonPtr(tokens []string) string {
|
||||
var sb strings.Builder
|
||||
for _, tok := range tokens {
|
||||
sb.WriteByte('/')
|
||||
sb.WriteString(escape(tok))
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// Flag is output format with simple boolean property valid.
|
||||
type FlagOutput struct {
|
||||
Valid bool `json:"valid"`
|
||||
}
|
||||
|
||||
// The `Flag` output format, merely the boolean result.
|
||||
func (e *ValidationError) FlagOutput() *FlagOutput {
|
||||
return &FlagOutput{Valid: false}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type OutputUnit struct {
|
||||
Valid bool `json:"valid"`
|
||||
KeywordLocation string `json:"keywordLocation"`
|
||||
AbsoluteKeywordLocation string `json:"AbsoluteKeywordLocation,omitempty"`
|
||||
InstanceLocation string `json:"instanceLocation"`
|
||||
Error *OutputError `json:"error,omitempty"`
|
||||
Errors []OutputUnit `json:"errors,omitempty"`
|
||||
}
|
||||
|
||||
type OutputError struct {
|
||||
Kind ErrorKind
|
||||
p *message.Printer
|
||||
}
|
||||
|
||||
func (k OutputError) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(k.Kind.LocalizedString(k.p))
|
||||
}
|
||||
|
||||
// The `Basic` structure, a flat list of output units.
|
||||
func (e *ValidationError) BasicOutput() *OutputUnit {
|
||||
return e.LocalizedBasicOutput(defaultPrinter)
|
||||
}
|
||||
|
||||
func (e *ValidationError) LocalizedBasicOutput(p *message.Printer) *OutputUnit {
|
||||
out := e.output(true, false, "", "", p)
|
||||
return &out
|
||||
}
|
||||
|
||||
// The `Detailed` structure, based on the schema.
|
||||
func (e *ValidationError) DetailedOutput() *OutputUnit {
|
||||
return e.LocalizedDetailedOutput(defaultPrinter)
|
||||
}
|
||||
|
||||
func (e *ValidationError) LocalizedDetailedOutput(p *message.Printer) *OutputUnit {
|
||||
out := e.output(false, false, "", "", p)
|
||||
return &out
|
||||
}
|
||||
|
||||
func (e *ValidationError) output(flatten, inRef bool, schemaURL, kwLoc string, p *message.Printer) OutputUnit {
|
||||
if !inRef {
|
||||
if _, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
inRef = true
|
||||
}
|
||||
}
|
||||
if schemaURL != "" {
|
||||
kwLoc += e.SchemaURL[len(schemaURL):]
|
||||
if ref, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
kwLoc += jsonPtr(ref.KeywordPath())
|
||||
}
|
||||
}
|
||||
schemaURL = e.schemaURL()
|
||||
|
||||
keywordLocation := kwLoc
|
||||
if _, ok := e.ErrorKind.(*kind.Reference); !ok {
|
||||
keywordLocation += jsonPtr(e.ErrorKind.KeywordPath())
|
||||
}
|
||||
|
||||
out := OutputUnit{
|
||||
Valid: false,
|
||||
InstanceLocation: jsonPtr(e.InstanceLocation),
|
||||
KeywordLocation: keywordLocation,
|
||||
}
|
||||
if inRef {
|
||||
out.AbsoluteKeywordLocation = e.absoluteKeywordLocation()
|
||||
}
|
||||
for _, cause := range e.Causes {
|
||||
causeOut := cause.output(flatten, inRef, schemaURL, kwLoc, p)
|
||||
if cause.skip() {
|
||||
causeOut = causeOut.Errors[0]
|
||||
}
|
||||
if flatten {
|
||||
errors := causeOut.Errors
|
||||
causeOut.Errors = nil
|
||||
causeOut.Error = &OutputError{cause.ErrorKind, p}
|
||||
out.Errors = append(out.Errors, causeOut)
|
||||
if len(errors) > 0 {
|
||||
out.Errors = append(out.Errors, errors...)
|
||||
}
|
||||
} else {
|
||||
out.Errors = append(out.Errors, causeOut)
|
||||
}
|
||||
}
|
||||
if len(out.Errors) == 0 {
|
||||
out.Error = &OutputError{e.ErrorKind, p}
|
||||
}
|
||||
return out
|
||||
}
|
||||
142
vendor/github.com/santhosh-tekuri/jsonschema/v6/position.go
generated
vendored
Normal file
142
vendor/github.com/santhosh-tekuri/jsonschema/v6/position.go
generated
vendored
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Position tells possible tokens in json.
|
||||
type Position interface {
|
||||
collect(v any, ptr jsonPointer) map[jsonPointer]any
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AllProp struct{}
|
||||
|
||||
func (AllProp) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
m := map[jsonPointer]any{}
|
||||
for pname, pvalue := range obj {
|
||||
m[ptr.append(pname)] = pvalue
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AllItem struct{}
|
||||
|
||||
func (AllItem) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
m := map[jsonPointer]any{}
|
||||
for i, item := range arr {
|
||||
m[ptr.append(strconv.Itoa(i))] = item
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Prop string
|
||||
|
||||
func (p Prop) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
pvalue, ok := obj[string(p)]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return map[jsonPointer]any{
|
||||
ptr.append(string(p)): pvalue,
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Item int
|
||||
|
||||
func (i Item) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if i < 0 || int(i) >= len(arr) {
|
||||
return nil
|
||||
}
|
||||
return map[jsonPointer]any{
|
||||
ptr.append(strconv.Itoa(int(i))): arr[int(i)],
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemaPath tells where to look for subschema inside keyword.
|
||||
type SchemaPath []Position
|
||||
|
||||
func schemaPath(path string) SchemaPath {
|
||||
var sp SchemaPath
|
||||
for _, tok := range strings.Split(path, "/") {
|
||||
var pos Position
|
||||
switch tok {
|
||||
case "*":
|
||||
pos = AllProp{}
|
||||
case "[]":
|
||||
pos = AllItem{}
|
||||
default:
|
||||
if i, err := strconv.Atoi(tok); err == nil {
|
||||
pos = Item(i)
|
||||
} else {
|
||||
pos = Prop(tok)
|
||||
}
|
||||
}
|
||||
sp = append(sp, pos)
|
||||
}
|
||||
return sp
|
||||
}
|
||||
|
||||
func (sp SchemaPath) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
if len(sp) == 0 {
|
||||
return map[jsonPointer]any{
|
||||
ptr: v,
|
||||
}
|
||||
}
|
||||
p, sp := sp[0], sp[1:]
|
||||
m := p.collect(v, ptr)
|
||||
mm := map[jsonPointer]any{}
|
||||
for ptr, v := range m {
|
||||
m = sp.collect(v, ptr)
|
||||
for k, v := range m {
|
||||
mm[k] = v
|
||||
}
|
||||
}
|
||||
return mm
|
||||
}
|
||||
|
||||
func (sp SchemaPath) String() string {
|
||||
var sb strings.Builder
|
||||
for _, pos := range sp {
|
||||
if sb.Len() != 0 {
|
||||
sb.WriteByte('/')
|
||||
}
|
||||
switch pos := pos.(type) {
|
||||
case AllProp:
|
||||
sb.WriteString("*")
|
||||
case AllItem:
|
||||
sb.WriteString("[]")
|
||||
case Prop:
|
||||
sb.WriteString(string(pos))
|
||||
case Item:
|
||||
sb.WriteString(strconv.Itoa(int(pos)))
|
||||
}
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
202
vendor/github.com/santhosh-tekuri/jsonschema/v6/root.go
generated
vendored
Normal file
202
vendor/github.com/santhosh-tekuri/jsonschema/v6/root.go
generated
vendored
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type root struct {
|
||||
url url
|
||||
doc any
|
||||
resources map[jsonPointer]*resource
|
||||
subschemasProcessed map[jsonPointer]struct{}
|
||||
}
|
||||
|
||||
func (r *root) rootResource() *resource {
|
||||
return r.resources[""]
|
||||
}
|
||||
|
||||
func (r *root) resource(ptr jsonPointer) *resource {
|
||||
for {
|
||||
if res, ok := r.resources[ptr]; ok {
|
||||
return res
|
||||
}
|
||||
slash := strings.LastIndexByte(string(ptr), '/')
|
||||
if slash == -1 {
|
||||
break
|
||||
}
|
||||
ptr = ptr[:slash]
|
||||
}
|
||||
return r.rootResource()
|
||||
}
|
||||
|
||||
func (r *root) resolveFragmentIn(frag fragment, res *resource) (urlPtr, error) {
|
||||
var ptr jsonPointer
|
||||
switch f := frag.convert().(type) {
|
||||
case jsonPointer:
|
||||
ptr = res.ptr.concat(f)
|
||||
case anchor:
|
||||
aptr, ok := res.anchors[f]
|
||||
if !ok {
|
||||
return urlPtr{}, &AnchorNotFoundError{
|
||||
URL: r.url.String(),
|
||||
Reference: (&urlFrag{res.id, frag}).String(),
|
||||
}
|
||||
}
|
||||
ptr = aptr
|
||||
}
|
||||
return urlPtr{r.url, ptr}, nil
|
||||
}
|
||||
|
||||
func (r *root) resolveFragment(frag fragment) (urlPtr, error) {
|
||||
return r.resolveFragmentIn(frag, r.rootResource())
|
||||
}
|
||||
|
||||
// resovles urlFrag to urlPtr from root.
|
||||
// returns nil if it is external.
|
||||
func (r *root) resolve(uf urlFrag) (*urlPtr, error) {
|
||||
var res *resource
|
||||
if uf.url == r.url {
|
||||
res = r.rootResource()
|
||||
} else {
|
||||
// look for resource with id==uf.url
|
||||
for _, v := range r.resources {
|
||||
if v.id == uf.url {
|
||||
res = v
|
||||
break
|
||||
}
|
||||
}
|
||||
if res == nil {
|
||||
return nil, nil // external url
|
||||
}
|
||||
}
|
||||
up, err := r.resolveFragmentIn(uf.frag, res)
|
||||
return &up, err
|
||||
}
|
||||
|
||||
// collectAnchors records in res every anchor declared directly on sch
// (located at schPtr in the document). How anchors are declared depends
// on the resource's draft: pre-2019 drafts embed the anchor in the id's
// fragment, draft 2019-09 adds $anchor, and 2020-12 adds $dynamicAnchor.
func (r *root) collectAnchors(sch any, schPtr jsonPointer, res *resource) error {
	obj, ok := sch.(map[string]any)
	if !ok {
		return nil
	}

	// addAnchor registers anchor -> schPtr, rejecting a redefinition at a
	// different location with *DuplicateAnchorError.
	addAnchor := func(anchor anchor) error {
		ptr1, ok := res.anchors[anchor]
		if ok {
			if ptr1 == schPtr {
				// anchor with same root_ptr already exists
				return nil
			}
			return &DuplicateAnchorError{
				string(anchor), r.url.String(), string(ptr1), string(schPtr),
			}
		}
		res.anchors[anchor] = schPtr
		return nil
	}

	if res.dialect.draft.version < 2019 {
		if _, ok := obj["$ref"]; ok {
			// All other properties in a "$ref" object MUST be ignored
			return nil
		}
		// anchor is specified in id
		if id, ok := strVal(obj, res.dialect.draft.id); ok {
			_, frag, err := splitFragment(id)
			if err != nil {
				loc := urlPtr{r.url, schPtr}
				return &ParseAnchorError{loc.String()}
			}
			if anchor, ok := frag.convert().(anchor); ok {
				if err := addAnchor(anchor); err != nil {
					return err
				}
			}
		}
	}
	if res.dialect.draft.version >= 2019 {
		if s, ok := strVal(obj, "$anchor"); ok {
			if err := addAnchor(anchor(s)); err != nil {
				return err
			}
		}
	}
	if res.dialect.draft.version >= 2020 {
		if s, ok := strVal(obj, "$dynamicAnchor"); ok {
			if err := addAnchor(anchor(s)); err != nil {
				return err
			}
			// dynamic anchors are additionally tracked for $dynamicRef resolution
			res.dynamicAnchors = append(res.dynamicAnchors, anchor(s))
		}
	}

	return nil
}
|
||||
|
||||
// clone returns a deep copy of r (resources and the processed-subschema
// set are copied; the parsed document itself is shared).
func (r *root) clone() *root {
	processed := map[jsonPointer]struct{}{}
	for k := range r.subschemasProcessed {
		processed[k] = struct{}{}
	}
	resources := map[jsonPointer]*resource{}
	for k, v := range r.resources {
		resources[k] = v.clone()
	}
	return &root{
		url:                 r.url,
		doc:                 r.doc,
		resources:           resources,
		subschemasProcessed: processed,
	}
}
|
||||
|
||||
// --
|
||||
|
||||
// resource is a schema resource: a subtree of a document that has its own
// base URI (id) and dialect.
type resource struct {
	ptr            jsonPointer            // location of the resource within its document
	id             url                    // base URI of the resource
	dialect        dialect                // draft + vocabularies governing the resource
	anchors        map[anchor]jsonPointer // anchor name -> location within the document
	dynamicAnchors []anchor               // anchors declared via $dynamicAnchor (2020-12+)
}

// newResource returns a resource at ptr with base URI id and no anchors.
func newResource(ptr jsonPointer, id url) *resource {
	return &resource{ptr: ptr, id: id, anchors: make(map[anchor]jsonPointer)}
}

// clone returns a deep copy of res.
func (res *resource) clone() *resource {
	anchors := map[anchor]jsonPointer{}
	for k, v := range res.anchors {
		anchors[k] = v
	}
	return &resource{
		ptr:            res.ptr,
		id:             res.id,
		dialect:        res.dialect,
		anchors:        anchors,
		dynamicAnchors: slices.Clone(res.dynamicAnchors),
	}
}
|
||||
|
||||
//--
|
||||
|
||||
// UnsupportedVocabularyError is returned when a meta-schema requires a
// vocabulary that is neither implemented nor registered.
type UnsupportedVocabularyError struct {
	URL        string
	Vocabulary string
}

func (e *UnsupportedVocabularyError) Error() string {
	return fmt.Sprintf("unsupported vocabulary %q in %q", e.Vocabulary, e.URL)
}

// --

// AnchorNotFoundError is returned when a reference's anchor fragment does
// not exist in the target schema resource.
type AnchorNotFoundError struct {
	URL       string
	Reference string
}

func (e *AnchorNotFoundError) Error() string {
	return fmt.Sprintf("anchor in %q not found in schema %q", e.Reference, e.URL)
}
|
||||
289
vendor/github.com/santhosh-tekuri/jsonschema/v6/roots.go
generated
vendored
Normal file
289
vendor/github.com/santhosh-tekuri/jsonschema/v6/roots.go
generated
vendored
Normal file
|
|
@ -0,0 +1,289 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// roots tracks every schema document (root) known to a compiler, together
// with the settings that govern how new documents are loaded, matched to
// drafts/vocabularies, and validated.
type roots struct {
	defaultDraft *Draft               // draft used when a document has no $schema
	roots        map[url]*root        // loaded documents keyed by URL
	loader       defaultLoader        // resolves URLs to parsed documents
	regexpEngine RegexpEngine         // engine used for "pattern"/"format: regex"
	vocabularies map[string]*Vocabulary
	assertVocabs bool
}

// newRoots returns a roots configured with the latest draft, a
// file-based loader, and the stdlib regexp engine.
func newRoots() *roots {
	return &roots{
		defaultDraft: draftLatest,
		roots:        map[url]*root{},
		loader: defaultLoader{
			docs:   map[url]any{},
			loader: FileLoader{},
		},
		regexpEngine: goRegexpCompile,
		vocabularies: map[string]*Vocabulary{},
	}
}
|
||||
|
||||
// orLoad returns the cached root for u, loading and registering the
// document on first use.
func (rr *roots) orLoad(u url) (*root, error) {
	if r, ok := rr.roots[u]; ok {
		return r, nil
	}
	doc, err := rr.loader.load(u)
	if err != nil {
		return nil, err
	}
	return rr.addRoot(u, doc)
}

// addRoot registers doc as the root document for u: it collects the
// document's schema resources and validates the document against its
// meta-schema before caching it.
func (rr *roots) addRoot(u url, doc any) (*root, error) {
	r := &root{
		url:                 u,
		doc:                 doc,
		resources:           map[jsonPointer]*resource{},
		subschemasProcessed: map[jsonPointer]struct{}{},
	}
	if err := rr.collectResources(r, doc, u, "", dialect{rr.defaultDraft, nil}); err != nil {
		return nil, err
	}
	// the official json-schema.org meta-schemas are trusted as-is;
	// everything else is validated against its meta-schema.
	if !strings.HasPrefix(u.String(), "http://json-schema.org/") &&
		!strings.HasPrefix(u.String(), "https://json-schema.org/") {
		if err := rr.validate(r, doc, ""); err != nil {
			return nil, err
		}
	}

	rr.roots[u] = r
	return r, nil
}

// resolveFragment loads (if necessary) the document at uf.url and
// resolves the fragment within it.
func (rr *roots) resolveFragment(uf urlFrag) (urlPtr, error) {
	r, err := rr.orLoad(uf.url)
	if err != nil {
		return urlPtr{}, err
	}
	return r.resolveFragment(uf.frag)
}
|
||||
|
||||
// collectResources is a memoized wrapper around _collectResources: each
// subschema pointer is processed at most once per root.
func (rr *roots) collectResources(r *root, sch any, base url, schPtr jsonPointer, fallback dialect) error {
	if _, ok := r.subschemasProcessed[schPtr]; ok {
		return nil
	}
	if err := rr._collectResources(r, sch, base, schPtr, fallback); err != nil {
		return err
	}
	r.subschemasProcessed[schPtr] = struct{}{}
	return nil
}

// _collectResources walks sch (located at schPtr, with base URI base) and
// registers every embedded schema resource in r, recursing into all
// subschemas. fallback is the dialect inherited from the enclosing
// resource; it applies when the schema declares no usable $schema/$id of
// its own.
func (rr *roots) _collectResources(r *root, sch any, base url, schPtr jsonPointer, fallback dialect) error {
	if _, ok := sch.(bool); ok {
		if schPtr.isEmpty() {
			// root resource
			res := newResource(schPtr, base)
			res.dialect = fallback
			r.resources[schPtr] = res
		}
		return nil
	}
	obj, ok := sch.(map[string]any)
	if !ok {
		return nil
	}

	// hasSchema records whether the object carries a string-valued $schema.
	hasSchema := false
	if sch, ok := obj["$schema"]; ok {
		if _, ok := sch.(string); ok {
			hasSchema = true
		}
	}

	draft, err := rr.loader.getDraft(urlPtr{r.url, schPtr}, sch, fallback.draft, map[url]struct{}{})
	if err != nil {
		return err
	}
	id := draft.getID(obj)
	if id == "" && !schPtr.isEmpty() {
		// ignore $schema
		draft = fallback.draft
		hasSchema = false
		id = draft.getID(obj)
	}

	// a new resource starts wherever an id is declared, and at the
	// document root even without one.
	var res *resource
	if id != "" {
		uf, err := base.join(id)
		if err != nil {
			loc := urlPtr{r.url, schPtr}
			return &ParseIDError{loc.String()}
		}
		base = uf.url
		res = newResource(schPtr, base)
	} else if schPtr.isEmpty() {
		// root resource
		res = newResource(schPtr, base)
	}

	if res != nil {
		// reject the same id declared at two different locations.
		found := false
		for _, res := range r.resources {
			if res.id == base {
				found = true
				if res.ptr != schPtr {
					return &DuplicateIDError{base.String(), r.url.String(), string(schPtr), string(res.ptr)}
				}
			}
		}
		if !found {
			if hasSchema {
				vocabs, err := rr.loader.getMetaVocabs(sch, draft, rr.vocabularies)
				if err != nil {
					return err
				}
				res.dialect = dialect{draft, vocabs}
			} else {
				res.dialect = fallback
			}
			r.resources[schPtr] = res
		}
	}

	// locate the resource this schema belongs to; one must exist by now.
	var baseRes *resource
	for _, res := range r.resources {
		if res.id == base {
			baseRes = res
			break
		}
	}
	if baseRes == nil {
		panic("baseres is nil")
	}

	// found base resource
	if err := r.collectAnchors(sch, schPtr, baseRes); err != nil {
		return err
	}

	// process subschemas
	subschemas := map[jsonPointer]any{}
	for _, sp := range draft.subschemas {
		ss := sp.collect(obj, schPtr)
		for k, v := range ss {
			subschemas[k] = v
		}
	}
	for _, vocab := range baseRes.dialect.activeVocabs(true, rr.vocabularies) {
		if v := rr.vocabularies[vocab]; v != nil {
			for _, sp := range v.Subschemas {
				ss := sp.collect(obj, schPtr)
				for k, v := range ss {
					subschemas[k] = v
				}
			}
		}
	}
	for ptr, v := range subschemas {
		if err := rr.collectResources(r, v, base, ptr, baseRes.dialect); err != nil {
			return err
		}
	}

	return nil
}
|
||||
|
||||
// ensureSubschema makes sure the schema at up has had its resources
// collected and has been validated against its meta-schema. The work is
// done on a clone of the root so that a failure leaves the cached root
// untouched; the clone replaces the cached root only on success.
func (rr *roots) ensureSubschema(up urlPtr) error {
	r, err := rr.orLoad(up.url)
	if err != nil {
		return err
	}
	if _, ok := r.subschemasProcessed[up.ptr]; ok {
		return nil
	}
	v, err := up.lookup(r.doc)
	if err != nil {
		return err
	}
	rClone := r.clone()
	if err := rr.addSubschema(rClone, up.ptr); err != nil {
		return err
	}
	if err := rr.validate(rClone, v, up.ptr); err != nil {
		return err
	}
	rr.roots[r.url] = rClone
	return nil
}

// addSubschema collects resources (and, if the pointer did not become a
// resource itself, anchors) for the schema at ptr, using the enclosing
// resource's base URL and dialect.
func (rr *roots) addSubschema(r *root, ptr jsonPointer) error {
	v, err := (&urlPtr{r.url, ptr}).lookup(r.doc)
	if err != nil {
		return err
	}
	base := r.resource(ptr)
	baseURL := base.id
	if err := rr.collectResources(r, v, baseURL, ptr, base.dialect); err != nil {
		return err
	}

	// collect anchors
	if _, ok := r.resources[ptr]; !ok {
		res := r.resource(ptr)
		if err := r.collectAnchors(v, ptr, res); err != nil {
			return err
		}
	}
	return nil
}

// validate checks the schema value v (at ptr in r's document) against the
// meta-schema of its resource's dialect, wrapping any failure in
// *SchemaValidationError.
func (rr *roots) validate(r *root, v any, ptr jsonPointer) error {
	dialect := r.resource(ptr).dialect
	meta := dialect.getSchema(rr.assertVocabs, rr.vocabularies)
	if err := meta.validate(v, rr.regexpEngine, meta, r.resources, rr.assertVocabs, rr.vocabularies); err != nil {
		up := urlPtr{r.url, ptr}
		return &SchemaValidationError{URL: up.String(), Err: err}
	}
	return nil
}
|
||||
|
||||
// --
|
||||
|
||||
// InvalidMetaSchemaURLError is returned when a $schema value cannot be
// parsed as a URL.
type InvalidMetaSchemaURLError struct {
	URL string
	Err error
}

func (e *InvalidMetaSchemaURLError) Error() string {
	return fmt.Sprintf("invalid $schema in %q: %v", e.URL, e.Err)
}

// --

// UnsupportedDraftError is returned when $schema refers to a draft that
// this library does not implement.
type UnsupportedDraftError struct {
	URL string
}

func (e *UnsupportedDraftError) Error() string {
	return fmt.Sprintf("draft %q is not supported", e.URL)
}

// --

// MetaSchemaCycleError is returned when meta-schemas refer to each other
// in a cycle.
type MetaSchemaCycleError struct {
	URL string
}

func (e *MetaSchemaCycleError) Error() string {
	return fmt.Sprintf("cycle in resolving $schema in %q", e.URL)
}

// --

// MetaSchemaMismatchError is returned when a subschema's $schema differs
// from the $schema of its document root.
type MetaSchemaMismatchError struct {
	URL string
}

func (e *MetaSchemaMismatchError) Error() string {
	return fmt.Sprintf("$schema in %q does not match with $schema in root", e.URL)
}
|
||||
248
vendor/github.com/santhosh-tekuri/jsonschema/v6/schema.go
generated
vendored
Normal file
248
vendor/github.com/santhosh-tekuri/jsonschema/v6/schema.go
generated
vendored
Normal file
|
|
@ -0,0 +1,248 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
)
|
||||
|
||||
// Schema is the representation of a compiled
// jsonschema.
type Schema struct {
	up                urlPtr             // location of this schema in its document
	resource          *Schema            // enclosing schema resource
	dynamicAnchors    map[string]*Schema // $dynamicAnchor name -> schema, within this resource
	allPropsEvaluated bool
	allItemsEvaluated bool
	numItemsEvaluated int

	DraftVersion int    // e.g. 2020 for draft 2020-12
	Location     string // canonical "url#pointer" location

	// type agnostic --
	Bool            *bool // boolean schema
	ID              string
	Ref             *Schema
	Anchor          string
	RecursiveRef    *Schema
	RecursiveAnchor bool
	DynamicRef      *DynamicRef
	DynamicAnchor   string // "" if not specified
	Types           *Types
	Enum            *Enum
	Const           *any
	Not             *Schema
	AllOf           []*Schema
	AnyOf           []*Schema
	OneOf           []*Schema
	If              *Schema
	Then            *Schema
	Else            *Schema
	Format          *Format

	// object --
	MaxProperties         *int
	MinProperties         *int
	Required              []string
	PropertyNames         *Schema
	Properties            map[string]*Schema
	PatternProperties     map[Regexp]*Schema
	AdditionalProperties  any            // nil or bool or *Schema
	Dependencies          map[string]any // value is []string or *Schema
	DependentRequired     map[string][]string
	DependentSchemas      map[string]*Schema
	UnevaluatedProperties *Schema

	// array --
	MinItems         *int
	MaxItems         *int
	UniqueItems      bool
	Contains         *Schema
	MinContains      *int
	MaxContains      *int
	Items            any // nil or []*Schema or *Schema
	AdditionalItems  any // nil or bool or *Schema
	PrefixItems      []*Schema
	Items2020        *Schema
	UnevaluatedItems *Schema

	// string --
	MinLength        *int
	MaxLength        *int
	Pattern          Regexp
	ContentEncoding  *Decoder
	ContentMediaType *MediaType
	ContentSchema    *Schema

	// number --
	Maximum          *big.Rat
	Minimum          *big.Rat
	ExclusiveMaximum *big.Rat
	ExclusiveMinimum *big.Rat
	MultipleOf       *big.Rat

	Extensions []SchemaExt

	// annotations --
	Title       string
	Description string
	Default     *any
	Comment     string
	ReadOnly    bool
	WriteOnly   bool
	Examples    []any
	Deprecated  bool
}
|
||||
|
||||
// --
|
||||
|
||||
type jsonType int
|
||||
|
||||
const (
|
||||
invalidType jsonType = 0
|
||||
nullType jsonType = 1 << iota
|
||||
booleanType
|
||||
numberType
|
||||
integerType
|
||||
stringType
|
||||
arrayType
|
||||
objectType
|
||||
)
|
||||
|
||||
func typeOf(v any) jsonType {
|
||||
switch v.(type) {
|
||||
case nil:
|
||||
return nullType
|
||||
case bool:
|
||||
return booleanType
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
return numberType
|
||||
case string:
|
||||
return stringType
|
||||
case []any:
|
||||
return arrayType
|
||||
case map[string]any:
|
||||
return objectType
|
||||
default:
|
||||
return invalidType
|
||||
}
|
||||
}
|
||||
|
||||
func typeFromString(s string) jsonType {
|
||||
switch s {
|
||||
case "null":
|
||||
return nullType
|
||||
case "boolean":
|
||||
return booleanType
|
||||
case "number":
|
||||
return numberType
|
||||
case "integer":
|
||||
return integerType
|
||||
case "string":
|
||||
return stringType
|
||||
case "array":
|
||||
return arrayType
|
||||
case "object":
|
||||
return objectType
|
||||
}
|
||||
return invalidType
|
||||
}
|
||||
|
||||
func (jt jsonType) String() string {
|
||||
switch jt {
|
||||
case nullType:
|
||||
return "null"
|
||||
case booleanType:
|
||||
return "boolean"
|
||||
case numberType:
|
||||
return "number"
|
||||
case integerType:
|
||||
return "integer"
|
||||
case stringType:
|
||||
return "string"
|
||||
case arrayType:
|
||||
return "array"
|
||||
case objectType:
|
||||
return "object"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// Types encapsulates list of json value types.
// It is a bit set of jsonType flags.
type Types int

// newTypes builds a Types from the value of a "type" keyword (a string or
// an array of strings); it returns nil when no known type name is present.
func newTypes(v any) *Types {
	var types Types
	switch v := v.(type) {
	case string:
		types.add(typeFromString(v))
	case []any:
		for _, item := range v {
			if s, ok := item.(string); ok {
				types.add(typeFromString(s))
			}
		}
	}
	if types.IsEmpty() {
		return nil
	}
	return &types
}

// IsEmpty reports whether no type is set.
func (tt Types) IsEmpty() bool {
	return tt == 0
}

// add sets the bit for t (a no-op for invalidType, which is zero).
func (tt *Types) add(t jsonType) {
	*tt = Types(int(*tt) | int(t))
}

// contains reports whether the bit for t is set.
func (tt Types) contains(t jsonType) bool {
	return int(tt)&int(t) != 0
}

// ToStrings returns the names of the set's types in canonical order.
func (tt Types) ToStrings() []string {
	types := []jsonType{
		nullType, booleanType, numberType, integerType,
		stringType, arrayType, objectType,
	}
	var arr []string
	for _, t := range types {
		if tt.contains(t) {
			arr = append(arr, t.String())
		}
	}
	return arr
}

func (tt Types) String() string {
	return fmt.Sprintf("%v", tt.ToStrings())
}

// --

// Enum holds the values of an "enum" keyword plus a precomputed bit set
// of the value types, used as a fast pre-check before deep comparison.
type Enum struct {
	Values []any
	types  Types
}

// newEnum returns an Enum for arr, recording each element's json type.
func newEnum(arr []any) *Enum {
	var types Types
	for _, item := range arr {
		types.add(typeOf(item))
	}
	return &Enum{arr, types}
}
|
||||
|
||||
// --
|
||||
|
||||
// DynamicRef represents a compiled $dynamicRef: the lexically resolved
// target plus the anchor name used for dynamic-scope resolution.
type DynamicRef struct {
	Ref    *Schema
	Anchor string // "" if not specified
}

// newSchema returns an empty Schema located at up.
func newSchema(up urlPtr) *Schema {
	return &Schema{up: up, Location: up.String()}
}
|
||||
464
vendor/github.com/santhosh-tekuri/jsonschema/v6/util.go
generated
vendored
Normal file
464
vendor/github.com/santhosh-tekuri/jsonschema/v6/util.go
generated
vendored
Normal file
|
|
@ -0,0 +1,464 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"math/big"
|
||||
gourl "net/url"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
// --
|
||||
|
||||
// url is an absolute URL (without fragment) identifying a schema document.
type url (string)

func (u url) String() string {
	return string(u)
}

// join resolves the (possibly relative) reference ref against u and
// returns the resulting URL together with ref's decoded fragment.
func (u url) join(ref string) (*urlFrag, error) {
	base, err := gourl.Parse(string(u))
	if err != nil {
		return nil, &ParseURLError{URL: u.String(), Err: err}
	}

	ref, frag, err := splitFragment(ref)
	if err != nil {
		return nil, err
	}
	refURL, err := gourl.Parse(ref)
	if err != nil {
		return nil, &ParseURLError{URL: ref, Err: err}
	}
	resolved := base.ResolveReference(refURL)

	// see https://github.com/golang/go/issues/66084 (net/url: ResolveReference ignores Opaque value)
	if !refURL.IsAbs() && base.Opaque != "" {
		resolved.Opaque = base.Opaque
	}

	return &urlFrag{url: url(resolved.String()), frag: frag}, nil
}
|
||||
|
||||
// --
|
||||
|
||||
// jsonPointer is an RFC 6901 json-pointer: "" or "/tok/tok...".
type jsonPointer string

// escape encodes a single reference token per RFC 6901
// ("~" -> "~0", "/" -> "~1"; "~" must be replaced first).
func escape(tok string) string {
	return strings.ReplaceAll(strings.ReplaceAll(tok, "~", "~0"), "/", "~1")
}

// unescape decodes a reference token; ok is false when the token contains
// an invalid "~" escape (a trailing "~", or "~" not followed by 0/1).
func unescape(tok string) (string, bool) {
	if !strings.Contains(tok, "~") {
		return tok, true
	}
	var sb strings.Builder
	for i := 0; i < len(tok); i++ {
		c := tok[i]
		if c != '~' {
			sb.WriteByte(c)
			continue
		}
		i++
		if i == len(tok) {
			return "", false
		}
		switch tok[i] {
		case '0':
			sb.WriteByte('~')
		case '1':
			sb.WriteByte('/')
		default:
			return "", false
		}
	}
	return sb.String(), true
}
|
||||
|
||||
// isEmpty reports whether ptr refers to the document root.
func (ptr jsonPointer) isEmpty() bool {
	return string(ptr) == ""
}

// concat joins next (itself a pointer) onto ptr.
func (ptr jsonPointer) concat(next jsonPointer) jsonPointer {
	return jsonPointer(fmt.Sprintf("%s%s", ptr, next))
}

// append extends ptr with one escaped reference token.
func (ptr jsonPointer) append(tok string) jsonPointer {
	return jsonPointer(fmt.Sprintf("%s/%s", ptr, escape(tok)))
}

// append2 extends ptr with two escaped reference tokens.
func (ptr jsonPointer) append2(tok1, tok2 string) jsonPointer {
	return jsonPointer(fmt.Sprintf("%s/%s/%s", ptr, escape(tok1), escape(tok2)))
}
|
||||
|
||||
// --
|
||||
|
||||
// anchor is a plain-name fragment (the value of $anchor/$dynamicAnchor,
// or a pre-2019 id fragment).
type anchor string

// --

// fragment is the raw, percent-decoded fragment of a URL; it denotes
// either a json-pointer or an anchor (see convert).
type fragment string
|
||||
|
||||
// decode percent-decodes a URL fragment.
func decode(frag string) (string, error) {
	return gourl.PathUnescape(frag)
}

// encode percent-encodes frag for use as a URL fragment while keeping
// "/" separators intact (PathEscape alone would escape them).
func encode(frag string) string {
	toks := strings.Split(frag, "/")
	for i, tok := range toks {
		toks[i] = gourl.PathEscape(tok)
	}
	return strings.Join(toks, "/")
}
|
||||
|
||||
// splitFragment splits str at "#" and percent-decodes the fragment part.
func splitFragment(str string) (string, fragment, error) {
	u, f := split(str)
	f, err := decode(f)
	if err != nil {
		return "", fragment(""), &ParseURLError{URL: str, Err: err}
	}
	return u, fragment(f), nil
}

// split cuts str at the first "#"; the fragment is "" when absent.
func split(str string) (string, string) {
	hash := strings.IndexByte(str, '#')
	if hash == -1 {
		return str, ""
	}
	return str[:hash], str[hash+1:]
}

// convert classifies a fragment: "" or anything starting with "/" is a
// jsonPointer; any other non-empty value is an anchor name.
func (frag fragment) convert() any {
	str := string(frag)
	if str == "" || strings.HasPrefix(str, "/") {
		return jsonPointer(str)
	}
	return anchor(str)
}
|
||||
|
||||
// --
|
||||
|
||||
// urlFrag is a document URL plus its (decoded) fragment.
type urlFrag struct {
	url  url
	frag fragment
}
|
||||
|
||||
// startsWithWindowsDrive reports whether s begins with a windows drive
// path such as `C:\`.
func startsWithWindowsDrive(s string) bool {
	if len(s) == 0 || !strings.HasPrefix(s[1:], `:\`) {
		return false
	}
	drive := s[0]
	return ('a' <= drive && drive <= 'z') || ('A' <= drive && drive <= 'Z')
}
|
||||
|
||||
// absolute converts input — an absolute URL, or a filesystem path,
// optionally carrying a fragment — into an absolute URL + fragment.
// Relative filesystem paths become file:// URLs.
func absolute(input string) (*urlFrag, error) {
	u, frag, err := splitFragment(input)
	if err != nil {
		return nil, err
	}

	// if windows absolute file path, convert to file url
	// because: net/url parses driver name as scheme
	if runtime.GOOS == "windows" && startsWithWindowsDrive(u) {
		u = "file:///" + filepath.ToSlash(u)
	}

	gourl, err := gourl.Parse(u)
	if err != nil {
		return nil, &ParseURLError{URL: input, Err: err}
	}
	if gourl.IsAbs() {
		return &urlFrag{url(u), frag}, nil
	}

	// not an absolute URL: treat u as a filesystem path.
	// avoid filesystem api in wasm
	if runtime.GOOS != "js" {
		abs, err := filepath.Abs(u)
		if err != nil {
			return nil, &ParseURLError{URL: input, Err: err}
		}
		u = abs
	}
	if !strings.HasPrefix(u, "/") {
		u = "/" + u
	}
	u = "file://" + filepath.ToSlash(u)

	// parse once more (relative to the earlier URL) only to validate u.
	_, err = gourl.Parse(u)
	if err != nil {
		return nil, &ParseURLError{URL: input, Err: err}
	}
	return &urlFrag{url: url(u), frag: frag}, nil
}
|
||||
|
||||
// String formats uf as "url#encoded-fragment".
func (uf *urlFrag) String() string {
	return fmt.Sprintf("%s#%s", uf.url, encode(string(uf.frag)))
}
|
||||
|
||||
// --
|
||||
|
||||
// urlPtr addresses a schema value: a document URL plus a json-pointer
// within that document.
type urlPtr struct {
	url url
	ptr jsonPointer
}

// lookup walks the document v along up.ptr and returns the value it
// addresses. It returns *InvalidJsonPointerError for a malformed token
// and *JSONPointerNotFoundError when a token does not match.
func (up *urlPtr) lookup(v any) (any, error) {
	for _, tok := range strings.Split(string(up.ptr), "/")[1:] {
		tok, ok := unescape(tok)
		if !ok {
			return nil, &InvalidJsonPointerError{up.String()}
		}
		switch val := v.(type) {
		case map[string]any:
			if pvalue, ok := val[tok]; ok {
				v = pvalue
				continue
			}
		case []any:
			if index, err := strconv.Atoi(tok); err == nil {
				if index >= 0 && index < len(val) {
					v = val[index]
					continue
				}
			}
		}
		return nil, &JSONPointerNotFoundError{up.String()}
	}
	return v, nil
}

// format returns the location string of up extended with one more token.
func (up *urlPtr) format(tok string) string {
	return fmt.Sprintf("%s#%s/%s", up.url, encode(string(up.ptr)), encode(escape(tok)))
}

func (up *urlPtr) String() string {
	return fmt.Sprintf("%s#%s", up.url, encode(string(up.ptr)))
}
|
||||
|
||||
// --
|
||||
|
||||
// minInt returns the smaller of i and j.
// (The file already targets Go >= 1.21 — it uses the slices package — so
// the built-in min replaces the hand-rolled comparison.)
func minInt(i, j int) int {
	return min(i, j)
}
|
||||
|
||||
// strVal returns obj[prop] when it exists and is a string; ok reports
// whether both conditions held.
func strVal(obj map[string]any, prop string) (string, bool) {
	if v, found := obj[prop]; found {
		s, ok := v.(string)
		return s, ok
	}
	return "", false
}
|
||||
|
||||
// isInteger reports whether num prints as an exact integer value
// (e.g. 2.0 counts; 3.5 and non-numeric values do not).
func isInteger(num any) bool {
	if rat, ok := new(big.Rat).SetString(fmt.Sprint(num)); ok {
		return rat.IsInt()
	}
	return false
}
|
||||
|
||||
// quote returns single-quoted string.
// used for embedding quoted strings in json.
func quote(s string) string {
	q := fmt.Sprintf("%q", s)
	q = q[1 : len(q)-1] // strip the surrounding double quotes
	q = strings.ReplaceAll(q, `\"`, `"`)
	q = strings.ReplaceAll(q, `'`, `\'`)
	return "'" + q + "'"
}
|
||||
|
||||
// equals reports deep equality of two json values, with numbers compared
// by mathematical value regardless of representation. A non-nil
// ErrorKind is returned when a value is not a valid json type.
func equals(v1, v2 any) (bool, ErrorKind) {
	switch v1 := v1.(type) {
	case map[string]any:
		v2, ok := v2.(map[string]any)
		if !ok || len(v1) != len(v2) {
			return false, nil
		}
		for k, val1 := range v1 {
			val2, ok := v2[k]
			if !ok {
				return false, nil
			}
			if ok, k := equals(val1, val2); !ok || k != nil {
				return ok, k
			}
		}
		return true, nil
	case []any:
		v2, ok := v2.([]any)
		if !ok || len(v1) != len(v2) {
			return false, nil
		}
		for i := range v1 {
			if ok, k := equals(v1[i], v2[i]); !ok || k != nil {
				return ok, k
			}
		}
		return true, nil
	case nil:
		return v2 == nil, nil
	case bool:
		v2, ok := v2.(bool)
		return ok && v1 == v2, nil
	case string:
		v2, ok := v2.(string)
		return ok && v1 == v2, nil
	case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		// compare numerically via big.Rat so 1, 1.0 and "1e0" are equal
		num1, ok1 := new(big.Rat).SetString(fmt.Sprint(v1))
		num2, ok2 := new(big.Rat).SetString(fmt.Sprint(v2))
		return ok1 && ok2 && num1.Cmp(num2) == 0, nil
	default:
		return false, &kind.InvalidJsonValue{Value: v1}
	}
}
|
||||
|
||||
// duplicates returns the indexes (j, i with j < i) of the first duplicate
// pair in arr, or (-1, -1) if all items are unique. Small arrays use
// O(n^2) pairwise comparison; larger ones bucket candidates by hash first.
func duplicates(arr []any) (int, int, ErrorKind) {
	if len(arr) <= 20 {
		for i := 1; i < len(arr); i++ {
			for j := 0; j < i; j++ {
				if ok, k := equals(arr[i], arr[j]); ok || k != nil {
					return j, i, k
				}
			}
		}
		return -1, -1, nil
	}

	m := make(map[uint64][]int)
	h := new(maphash.Hash)
	for i, item := range arr {
		h.Reset()
		// NOTE(review): writeHash's ErrorKind is discarded here; an invalid
		// value hashes arbitrarily and is only reported by equals when it
		// collides — confirm this is intended.
		writeHash(item, h)
		hash := h.Sum64()
		indexes, ok := m[hash]
		if ok {
			// same hash: confirm real equality against each candidate
			for _, j := range indexes {
				if ok, k := equals(item, arr[j]); ok || k != nil {
					return j, i, k
				}
			}
		}
		indexes = append(indexes, i)
		m[hash] = indexes
	}
	return -1, -1, nil
}
|
||||
|
||||
// writeHash folds v into h such that json-equal values (per equals) hash
// equal: a type tag byte is written first, object properties are hashed
// in sorted key order, and numbers hash by their rational value.
// Returns a non-nil ErrorKind for values that are not valid json types.
func writeHash(v any, h *maphash.Hash) ErrorKind {
	switch v := v.(type) {
	case map[string]any:
		_ = h.WriteByte(0)
		props := make([]string, 0, len(v))
		for prop := range v {
			props = append(props, prop)
		}
		slices.Sort(props)
		for _, prop := range props {
			// NOTE(review): errors from nested values are discarded — confirm intended.
			writeHash(prop, h)
			writeHash(v[prop], h)
		}
	case []any:
		_ = h.WriteByte(1)
		for _, item := range v {
			writeHash(item, h)
		}
	case nil:
		_ = h.WriteByte(2)
	case bool:
		_ = h.WriteByte(3)
		if v {
			_ = h.WriteByte(1)
		} else {
			_ = h.WriteByte(0)
		}
	case string:
		_ = h.WriteByte(4)
		_, _ = h.WriteString(v)
	case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		_ = h.WriteByte(5)
		num, _ := new(big.Rat).SetString(fmt.Sprint(v))
		_, _ = h.Write(num.Num().Bytes())
		_, _ = h.Write(num.Denom().Bytes())
	default:
		return &kind.InvalidJsonValue{Value: v}
	}
	return nil
}
|
||||
|
||||
// --
|
||||
|
||||
// ParseURLError is returned when a URL cannot be parsed.
type ParseURLError struct {
	URL string
	Err error
}

func (e *ParseURLError) Error() string {
	return fmt.Sprintf("error in parsing %q: %v", e.URL, e.Err)
}

// --

// InvalidJsonPointerError is returned for a syntactically invalid
// json-pointer.
type InvalidJsonPointerError struct {
	URL string
}

func (e *InvalidJsonPointerError) Error() string {
	return fmt.Sprintf("invalid json-pointer %q", e.URL)
}

// --

// JSONPointerNotFoundError is returned when a json-pointer does not
// address a value in its document.
type JSONPointerNotFoundError struct {
	URL string
}

func (e *JSONPointerNotFoundError) Error() string {
	return fmt.Sprintf("json-pointer in %q not found", e.URL)
}

// --

// SchemaValidationError is returned when a schema document fails
// validation against its meta-schema.
type SchemaValidationError struct {
	URL string
	Err error
}

func (e *SchemaValidationError) Error() string {
	return fmt.Sprintf("%q is not valid against metaschema: %v", e.URL, e.Err)
}
|
||||
|
||||
// --
|
||||
|
||||
// LocalizableError is an error whose message is localizable.
func LocalizableError(format string, args ...any) error {
	return &localizableError{format, args}
}

// localizableError carries a printf format plus args so the message can
// be re-rendered through a localized message.Printer.
type localizableError struct {
	msg  string
	args []any
}

func (e *localizableError) Error() string {
	return fmt.Sprintf(e.msg, e.args...)
}

// LocalizedError renders the message using p's translations.
func (e *localizableError) LocalizedError(p *message.Printer) string {
	return p.Sprintf(e.msg, e.args...)
}
|
||||
975
vendor/github.com/santhosh-tekuri/jsonschema/v6/validator.go
generated
vendored
Normal file
975
vendor/github.com/santhosh-tekuri/jsonschema/v6/validator.go
generated
vendored
Normal file
|
|
@ -0,0 +1,975 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"slices"
|
||||
"strconv"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
// Validate validates instance v against the compiled schema; a failure is
// reported as a *ValidationError.
func (sch *Schema) Validate(v any) error {
	return sch.validate(v, nil, nil, nil, false, nil)
}
|
||||
|
||||
// validate runs a validator over v and wraps any failure in a top-level
// ValidationError rooted at this schema. meta/resources/assertVocabs/
// vocabularies are set only when a schema document is being validated
// against its meta-schema.
func (sch *Schema) validate(v any, regexpEngine RegexpEngine, meta *Schema, resources map[jsonPointer]*resource, assertVocabs bool, vocabularies map[string]*Vocabulary) error {
	vd := validator{
		v:            v,
		vloc:         make([]string, 0, 8),
		sch:          sch,
		scp:          &scope{sch, "", 0, nil},
		uneval:       unevalFrom(v, sch, false),
		errors:       nil,
		boolResult:   false,
		regexpEngine: regexpEngine,
		meta:         meta,
		resources:    resources,
		assertVocabs: assertVocabs,
		vocabularies: vocabularies,
	}
	if _, err := vd.validate(); err != nil {
		verr := err.(*ValidationError)
		// a top-level group error is flattened into its causes
		var causes []*ValidationError
		if _, ok := verr.ErrorKind.(*kind.Group); ok {
			causes = verr.Causes
		} else {
			causes = []*ValidationError{verr}
		}
		return &ValidationError{
			SchemaURL:        sch.Location,
			InstanceLocation: nil,
			ErrorKind:        &kind.Schema{Location: sch.Location},
			Causes:           causes,
		}
	}

	return nil
}
|
||||
|
||||
// validator holds the state of a single validation run over one instance.
type validator struct {
	v            any      // instance value being validated
	vloc         []string // instance location tokens
	sch          *Schema  // schema currently being applied
	scp          *scope
	uneval       *uneval
	errors       []*ValidationError
	boolResult   bool // is interested to know valid or not (but not actual error)
	regexpEngine RegexpEngine

	// meta validation
	meta         *Schema                   // set only when validating with metaschema
	resources    map[jsonPointer]*resource // resources which should be validated with their dialect
	assertVocabs bool
	vocabularies map[string]*Vocabulary
}
|
||||
|
||||
func (vd *validator) validate() (*uneval, error) {
|
||||
s := vd.sch
|
||||
v := vd.v
|
||||
|
||||
// boolean --
|
||||
if s.Bool != nil {
|
||||
if *s.Bool {
|
||||
return vd.uneval, nil
|
||||
} else {
|
||||
return nil, vd.error(&kind.FalseSchema{})
|
||||
}
|
||||
}
|
||||
|
||||
// check cycle --
|
||||
if scp := vd.scp.checkCycle(); scp != nil {
|
||||
return nil, vd.error(&kind.RefCycle{
|
||||
URL: s.Location,
|
||||
KeywordLocation1: vd.scp.kwLoc(),
|
||||
KeywordLocation2: scp.kwLoc(),
|
||||
})
|
||||
}
|
||||
|
||||
t := typeOf(v)
|
||||
if t == invalidType {
|
||||
return nil, vd.error(&kind.InvalidJsonValue{Value: v})
|
||||
}
|
||||
|
||||
// type --
|
||||
if s.Types != nil && !s.Types.IsEmpty() {
|
||||
matched := s.Types.contains(t) || (s.Types.contains(integerType) && t == numberType && isInteger(v))
|
||||
if !matched {
|
||||
return nil, vd.error(&kind.Type{Got: t.String(), Want: s.Types.ToStrings()})
|
||||
}
|
||||
}
|
||||
|
||||
// const --
|
||||
if s.Const != nil {
|
||||
ok, k := equals(v, *s.Const)
|
||||
if k != nil {
|
||||
return nil, vd.error(k)
|
||||
} else if !ok {
|
||||
return nil, vd.error(&kind.Const{Got: v, Want: *s.Const})
|
||||
}
|
||||
}
|
||||
|
||||
// enum --
|
||||
if s.Enum != nil {
|
||||
matched := s.Enum.types.contains(typeOf(v))
|
||||
if matched {
|
||||
matched = false
|
||||
for _, item := range s.Enum.Values {
|
||||
ok, k := equals(v, item)
|
||||
if k != nil {
|
||||
return nil, vd.error(k)
|
||||
} else if ok {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return nil, vd.error(&kind.Enum{Got: v, Want: s.Enum.Values})
|
||||
}
|
||||
}
|
||||
|
||||
// format --
|
||||
if s.Format != nil {
|
||||
var err error
|
||||
if s.Format.Name == "regex" && vd.regexpEngine != nil {
|
||||
err = vd.regexpEngine.validate(v)
|
||||
} else {
|
||||
err = s.Format.Validate(v)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, vd.error(&kind.Format{Got: v, Want: s.Format.Name, Err: err})
|
||||
}
|
||||
}
|
||||
|
||||
// $ref --
|
||||
if s.Ref != nil {
|
||||
err := vd.validateRef(s.Ref, "$ref")
|
||||
if s.DraftVersion < 2019 {
|
||||
return vd.uneval, err
|
||||
}
|
||||
if err != nil {
|
||||
vd.addErr(err)
|
||||
}
|
||||
}
|
||||
|
||||
// type specific validations --
|
||||
switch v := v.(type) {
|
||||
case map[string]any:
|
||||
vd.objValidate(v)
|
||||
case []any:
|
||||
vd.arrValidate(v)
|
||||
case string:
|
||||
vd.strValidate(v)
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
vd.numValidate(v)
|
||||
}
|
||||
|
||||
if len(vd.errors) == 0 || !vd.boolResult {
|
||||
if s.DraftVersion >= 2019 {
|
||||
vd.validateRefs()
|
||||
}
|
||||
vd.condValidate()
|
||||
|
||||
for _, ext := range s.Extensions {
|
||||
ext.Validate(&ValidatorContext{vd}, v)
|
||||
}
|
||||
|
||||
if s.DraftVersion >= 2019 {
|
||||
vd.unevalValidate()
|
||||
}
|
||||
}
|
||||
|
||||
switch len(vd.errors) {
|
||||
case 0:
|
||||
return vd.uneval, nil
|
||||
case 1:
|
||||
return nil, vd.errors[0]
|
||||
default:
|
||||
verr := vd.error(&kind.Group{})
|
||||
verr.Causes = vd.errors
|
||||
return nil, verr
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) objValidate(obj map[string]any) {
|
||||
s := vd.sch
|
||||
|
||||
// minProperties --
|
||||
if s.MinProperties != nil {
|
||||
if len(obj) < *s.MinProperties {
|
||||
vd.addError(&kind.MinProperties{Got: len(obj), Want: *s.MinProperties})
|
||||
}
|
||||
}
|
||||
|
||||
// maxProperties --
|
||||
if s.MaxProperties != nil {
|
||||
if len(obj) > *s.MaxProperties {
|
||||
vd.addError(&kind.MaxProperties{Got: len(obj), Want: *s.MaxProperties})
|
||||
}
|
||||
}
|
||||
|
||||
// required --
|
||||
if len(s.Required) > 0 {
|
||||
if missing := vd.findMissing(obj, s.Required); missing != nil {
|
||||
vd.addError(&kind.Required{Missing: missing})
|
||||
}
|
||||
}
|
||||
|
||||
if vd.boolResult && len(vd.errors) > 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// dependencies --
|
||||
for pname, dep := range s.Dependencies {
|
||||
if _, ok := obj[pname]; ok {
|
||||
switch dep := dep.(type) {
|
||||
case []string:
|
||||
if missing := vd.findMissing(obj, dep); missing != nil {
|
||||
vd.addError(&kind.Dependency{Prop: pname, Missing: missing})
|
||||
}
|
||||
case *Schema:
|
||||
vd.addErr(vd.validateSelf(dep, "", false))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var additionalPros []string
|
||||
for pname, pvalue := range obj {
|
||||
if vd.boolResult && len(vd.errors) > 0 {
|
||||
return
|
||||
}
|
||||
evaluated := false
|
||||
|
||||
// properties --
|
||||
if sch, ok := s.Properties[pname]; ok {
|
||||
evaluated = true
|
||||
vd.addErr(vd.validateVal(sch, pvalue, pname))
|
||||
}
|
||||
|
||||
// patternProperties --
|
||||
for regex, sch := range s.PatternProperties {
|
||||
if regex.MatchString(pname) {
|
||||
evaluated = true
|
||||
vd.addErr(vd.validateVal(sch, pvalue, pname))
|
||||
}
|
||||
}
|
||||
|
||||
if !evaluated && s.AdditionalProperties != nil {
|
||||
evaluated = true
|
||||
switch additional := s.AdditionalProperties.(type) {
|
||||
case bool:
|
||||
if !additional {
|
||||
additionalPros = append(additionalPros, pname)
|
||||
}
|
||||
case *Schema:
|
||||
vd.addErr(vd.validateVal(additional, pvalue, pname))
|
||||
}
|
||||
}
|
||||
|
||||
if evaluated {
|
||||
delete(vd.uneval.props, pname)
|
||||
}
|
||||
}
|
||||
if len(additionalPros) > 0 {
|
||||
vd.addError(&kind.AdditionalProperties{Properties: additionalPros})
|
||||
}
|
||||
|
||||
if s.DraftVersion == 4 {
|
||||
return
|
||||
}
|
||||
|
||||
// propertyNames --
|
||||
if s.PropertyNames != nil {
|
||||
for pname := range obj {
|
||||
sch, meta, resources := s.PropertyNames, vd.meta, vd.resources
|
||||
res := vd.metaResource(sch)
|
||||
if res != nil {
|
||||
meta = res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
|
||||
sch = meta
|
||||
}
|
||||
if err := sch.validate(pname, vd.regexpEngine, meta, resources, vd.assertVocabs, vd.vocabularies); err != nil {
|
||||
verr := err.(*ValidationError)
|
||||
verr.SchemaURL = s.PropertyNames.Location
|
||||
verr.ErrorKind = &kind.PropertyNames{Property: pname}
|
||||
vd.addErr(verr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.DraftVersion == 6 {
|
||||
return
|
||||
}
|
||||
|
||||
// dependentSchemas --
|
||||
for pname, sch := range s.DependentSchemas {
|
||||
if _, ok := obj[pname]; ok {
|
||||
vd.addErr(vd.validateSelf(sch, "", false))
|
||||
}
|
||||
}
|
||||
|
||||
// dependentRequired --
|
||||
for pname, reqd := range s.DependentRequired {
|
||||
if _, ok := obj[pname]; ok {
|
||||
if missing := vd.findMissing(obj, reqd); missing != nil {
|
||||
vd.addError(&kind.DependentRequired{Prop: pname, Missing: missing})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) arrValidate(arr []any) {
|
||||
s := vd.sch
|
||||
|
||||
// minItems --
|
||||
if s.MinItems != nil {
|
||||
if len(arr) < *s.MinItems {
|
||||
vd.addError(&kind.MinItems{Got: len(arr), Want: *s.MinItems})
|
||||
}
|
||||
}
|
||||
|
||||
// maxItems --
|
||||
if s.MaxItems != nil {
|
||||
if len(arr) > *s.MaxItems {
|
||||
vd.addError(&kind.MaxItems{Got: len(arr), Want: *s.MaxItems})
|
||||
}
|
||||
}
|
||||
|
||||
// uniqueItems --
|
||||
if s.UniqueItems && len(arr) > 1 {
|
||||
i, j, k := duplicates(arr)
|
||||
if k != nil {
|
||||
vd.addError(k)
|
||||
} else if i != -1 {
|
||||
vd.addError(&kind.UniqueItems{Duplicates: [2]int{i, j}})
|
||||
}
|
||||
}
|
||||
|
||||
if s.DraftVersion < 2020 {
|
||||
evaluated := 0
|
||||
|
||||
// items --
|
||||
switch items := s.Items.(type) {
|
||||
case *Schema:
|
||||
for i, item := range arr {
|
||||
vd.addErr(vd.validateVal(items, item, strconv.Itoa(i)))
|
||||
}
|
||||
evaluated = len(arr)
|
||||
case []*Schema:
|
||||
min := minInt(len(arr), len(items))
|
||||
for i, item := range arr[:min] {
|
||||
vd.addErr(vd.validateVal(items[i], item, strconv.Itoa(i)))
|
||||
}
|
||||
evaluated = min
|
||||
}
|
||||
|
||||
// additionalItems --
|
||||
if s.AdditionalItems != nil {
|
||||
switch additional := s.AdditionalItems.(type) {
|
||||
case bool:
|
||||
if !additional && evaluated != len(arr) {
|
||||
vd.addError(&kind.AdditionalItems{Count: len(arr) - evaluated})
|
||||
}
|
||||
case *Schema:
|
||||
for i, item := range arr[evaluated:] {
|
||||
vd.addErr(vd.validateVal(additional, item, strconv.Itoa(i)))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
evaluated := minInt(len(s.PrefixItems), len(arr))
|
||||
|
||||
// prefixItems --
|
||||
for i, item := range arr[:evaluated] {
|
||||
vd.addErr(vd.validateVal(s.PrefixItems[i], item, strconv.Itoa(i)))
|
||||
}
|
||||
|
||||
// items2020 --
|
||||
if s.Items2020 != nil {
|
||||
for i, item := range arr[evaluated:] {
|
||||
vd.addErr(vd.validateVal(s.Items2020, item, strconv.Itoa(i)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// contains --
|
||||
if s.Contains != nil {
|
||||
var errors []*ValidationError
|
||||
var matched []int
|
||||
|
||||
for i, item := range arr {
|
||||
if err := vd.validateVal(s.Contains, item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err.(*ValidationError))
|
||||
} else {
|
||||
matched = append(matched, i)
|
||||
if s.DraftVersion >= 2020 {
|
||||
delete(vd.uneval.items, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// minContains --
|
||||
if s.MinContains != nil {
|
||||
if len(matched) < *s.MinContains {
|
||||
vd.addErrors(errors, &kind.MinContains{Got: matched, Want: *s.MinContains})
|
||||
}
|
||||
} else if len(matched) == 0 {
|
||||
vd.addErrors(errors, &kind.Contains{})
|
||||
}
|
||||
|
||||
// maxContains --
|
||||
if s.MaxContains != nil {
|
||||
if len(matched) > *s.MaxContains {
|
||||
vd.addError(&kind.MaxContains{Got: matched, Want: *s.MaxContains})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) strValidate(str string) {
|
||||
s := vd.sch
|
||||
|
||||
strLen := -1
|
||||
if s.MinLength != nil || s.MaxLength != nil {
|
||||
strLen = utf8.RuneCount([]byte(str))
|
||||
}
|
||||
|
||||
// minLength --
|
||||
if s.MinLength != nil {
|
||||
if strLen < *s.MinLength {
|
||||
vd.addError(&kind.MinLength{Got: strLen, Want: *s.MinLength})
|
||||
}
|
||||
}
|
||||
|
||||
// maxLength --
|
||||
if s.MaxLength != nil {
|
||||
if strLen > *s.MaxLength {
|
||||
vd.addError(&kind.MaxLength{Got: strLen, Want: *s.MaxLength})
|
||||
}
|
||||
}
|
||||
|
||||
// pattern --
|
||||
if s.Pattern != nil {
|
||||
if !s.Pattern.MatchString(str) {
|
||||
vd.addError(&kind.Pattern{Got: str, Want: s.Pattern.String()})
|
||||
}
|
||||
}
|
||||
|
||||
if s.DraftVersion == 6 {
|
||||
return
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
// contentEncoding --
|
||||
decoded := []byte(str)
|
||||
if s.ContentEncoding != nil {
|
||||
decoded, err = s.ContentEncoding.Decode(str)
|
||||
if err != nil {
|
||||
decoded = nil
|
||||
vd.addError(&kind.ContentEncoding{Want: s.ContentEncoding.Name, Err: err})
|
||||
}
|
||||
}
|
||||
|
||||
var deserialized *any
|
||||
if decoded != nil && s.ContentMediaType != nil {
|
||||
if s.ContentSchema == nil {
|
||||
err = s.ContentMediaType.Validate(decoded)
|
||||
} else {
|
||||
var value any
|
||||
value, err = s.ContentMediaType.UnmarshalJSON(decoded)
|
||||
if err == nil {
|
||||
deserialized = &value
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
vd.addError(&kind.ContentMediaType{
|
||||
Got: decoded,
|
||||
Want: s.ContentMediaType.Name,
|
||||
Err: err,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if deserialized != nil && s.ContentSchema != nil {
|
||||
sch, meta, resources := s.ContentSchema, vd.meta, vd.resources
|
||||
res := vd.metaResource(sch)
|
||||
if res != nil {
|
||||
meta = res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
|
||||
sch = meta
|
||||
}
|
||||
if err = sch.validate(*deserialized, vd.regexpEngine, meta, resources, vd.assertVocabs, vd.vocabularies); err != nil {
|
||||
verr := err.(*ValidationError)
|
||||
verr.SchemaURL = s.Location
|
||||
verr.ErrorKind = &kind.ContentSchema{}
|
||||
vd.addErr(verr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) numValidate(v any) {
|
||||
s := vd.sch
|
||||
|
||||
var numVal *big.Rat
|
||||
num := func() *big.Rat {
|
||||
if numVal == nil {
|
||||
numVal, _ = new(big.Rat).SetString(fmt.Sprintf("%v", v))
|
||||
}
|
||||
return numVal
|
||||
}
|
||||
|
||||
// minimum --
|
||||
if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
|
||||
vd.addError(&kind.Minimum{Got: num(), Want: s.Minimum})
|
||||
}
|
||||
|
||||
// maximum --
|
||||
if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
|
||||
vd.addError(&kind.Maximum{Got: num(), Want: s.Maximum})
|
||||
}
|
||||
|
||||
// exclusiveMinimum
|
||||
if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
|
||||
vd.addError(&kind.ExclusiveMinimum{Got: num(), Want: s.ExclusiveMinimum})
|
||||
}
|
||||
|
||||
// exclusiveMaximum
|
||||
if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
|
||||
vd.addError(&kind.ExclusiveMaximum{Got: num(), Want: s.ExclusiveMaximum})
|
||||
}
|
||||
|
||||
// multipleOf
|
||||
if s.MultipleOf != nil {
|
||||
if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
|
||||
vd.addError(&kind.MultipleOf{Got: num(), Want: s.MultipleOf})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) condValidate() {
|
||||
s := vd.sch
|
||||
|
||||
// not --
|
||||
if s.Not != nil {
|
||||
if vd.validateSelf(s.Not, "", true) == nil {
|
||||
vd.addError(&kind.Not{})
|
||||
}
|
||||
}
|
||||
|
||||
// allOf --
|
||||
if len(s.AllOf) > 0 {
|
||||
var errors []*ValidationError
|
||||
for _, sch := range s.AllOf {
|
||||
if err := vd.validateSelf(sch, "", false); err != nil {
|
||||
errors = append(errors, err.(*ValidationError))
|
||||
if vd.boolResult {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(errors) != 0 {
|
||||
vd.addErrors(errors, &kind.AllOf{})
|
||||
}
|
||||
}
|
||||
|
||||
// anyOf
|
||||
if len(s.AnyOf) > 0 {
|
||||
var matched bool
|
||||
var errors []*ValidationError
|
||||
for _, sch := range s.AnyOf {
|
||||
if err := vd.validateSelf(sch, "", false); err != nil {
|
||||
errors = append(errors, err.(*ValidationError))
|
||||
} else {
|
||||
matched = true
|
||||
// for uneval, all schemas must be evaluated
|
||||
if vd.uneval.isEmpty() {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
vd.addErrors(errors, &kind.AnyOf{})
|
||||
}
|
||||
}
|
||||
|
||||
// oneOf
|
||||
if len(s.OneOf) > 0 {
|
||||
var matched = -1
|
||||
var errors []*ValidationError
|
||||
for i, sch := range s.OneOf {
|
||||
if err := vd.validateSelf(sch, "", matched != -1); err != nil {
|
||||
if matched == -1 {
|
||||
errors = append(errors, err.(*ValidationError))
|
||||
}
|
||||
} else {
|
||||
if matched == -1 {
|
||||
matched = i
|
||||
} else {
|
||||
vd.addError(&kind.OneOf{Subschemas: []int{matched, i}})
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if matched == -1 {
|
||||
vd.addErrors(errors, &kind.OneOf{Subschemas: nil})
|
||||
}
|
||||
}
|
||||
|
||||
// if, then, else --
|
||||
if s.If != nil {
|
||||
if vd.validateSelf(s.If, "", true) == nil {
|
||||
if s.Then != nil {
|
||||
vd.addErr(vd.validateSelf(s.Then, "", false))
|
||||
}
|
||||
} else if s.Else != nil {
|
||||
vd.addErr(vd.validateSelf(s.Else, "", false))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) unevalValidate() {
|
||||
s := vd.sch
|
||||
|
||||
// unevaluatedProperties
|
||||
if obj, ok := vd.v.(map[string]any); ok && s.UnevaluatedProperties != nil {
|
||||
for pname := range vd.uneval.props {
|
||||
if pvalue, ok := obj[pname]; ok {
|
||||
vd.addErr(vd.validateVal(s.UnevaluatedProperties, pvalue, pname))
|
||||
}
|
||||
}
|
||||
vd.uneval.props = nil
|
||||
}
|
||||
|
||||
// unevaluatedItems
|
||||
if arr, ok := vd.v.([]any); ok && s.UnevaluatedItems != nil {
|
||||
for i := range vd.uneval.items {
|
||||
vd.addErr(vd.validateVal(s.UnevaluatedItems, arr[i], strconv.Itoa(i)))
|
||||
}
|
||||
vd.uneval.items = nil
|
||||
}
|
||||
}
|
||||
|
||||
// validation helpers --
|
||||
|
||||
func (vd *validator) validateSelf(sch *Schema, refKw string, boolResult bool) error {
|
||||
scp := vd.scp.child(sch, refKw, vd.scp.vid)
|
||||
uneval := unevalFrom(vd.v, sch, !vd.uneval.isEmpty())
|
||||
subvd := validator{
|
||||
v: vd.v,
|
||||
vloc: vd.vloc,
|
||||
sch: sch,
|
||||
scp: scp,
|
||||
uneval: uneval,
|
||||
errors: nil,
|
||||
boolResult: vd.boolResult || boolResult,
|
||||
regexpEngine: vd.regexpEngine,
|
||||
meta: vd.meta,
|
||||
resources: vd.resources,
|
||||
assertVocabs: vd.assertVocabs,
|
||||
vocabularies: vd.vocabularies,
|
||||
}
|
||||
subvd.handleMeta()
|
||||
uneval, err := subvd.validate()
|
||||
if err == nil {
|
||||
vd.uneval.merge(uneval)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (vd *validator) validateVal(sch *Schema, v any, vtok string) error {
|
||||
vloc := append(vd.vloc, vtok)
|
||||
scp := vd.scp.child(sch, "", vd.scp.vid+1)
|
||||
uneval := unevalFrom(v, sch, false)
|
||||
subvd := validator{
|
||||
v: v,
|
||||
vloc: vloc,
|
||||
sch: sch,
|
||||
scp: scp,
|
||||
uneval: uneval,
|
||||
errors: nil,
|
||||
boolResult: vd.boolResult,
|
||||
regexpEngine: vd.regexpEngine,
|
||||
meta: vd.meta,
|
||||
resources: vd.resources,
|
||||
assertVocabs: vd.assertVocabs,
|
||||
vocabularies: vd.vocabularies,
|
||||
}
|
||||
subvd.handleMeta()
|
||||
_, err := subvd.validate()
|
||||
return err
|
||||
}
|
||||
|
||||
func (vd *validator) validateValue(sch *Schema, v any, vpath []string) error {
|
||||
vloc := append(vd.vloc, vpath...)
|
||||
scp := vd.scp.child(sch, "", vd.scp.vid+1)
|
||||
uneval := unevalFrom(v, sch, false)
|
||||
subvd := validator{
|
||||
v: v,
|
||||
vloc: vloc,
|
||||
sch: sch,
|
||||
scp: scp,
|
||||
uneval: uneval,
|
||||
errors: nil,
|
||||
boolResult: vd.boolResult,
|
||||
regexpEngine: vd.regexpEngine,
|
||||
meta: vd.meta,
|
||||
resources: vd.resources,
|
||||
assertVocabs: vd.assertVocabs,
|
||||
vocabularies: vd.vocabularies,
|
||||
}
|
||||
subvd.handleMeta()
|
||||
_, err := subvd.validate()
|
||||
return err
|
||||
}
|
||||
|
||||
func (vd *validator) metaResource(sch *Schema) *resource {
|
||||
if sch != vd.meta {
|
||||
return nil
|
||||
}
|
||||
ptr := ""
|
||||
for _, tok := range vd.instanceLocation() {
|
||||
ptr += "/"
|
||||
ptr += escape(tok)
|
||||
}
|
||||
return vd.resources[jsonPointer(ptr)]
|
||||
}
|
||||
|
||||
func (vd *validator) handleMeta() {
|
||||
res := vd.metaResource(vd.sch)
|
||||
if res == nil {
|
||||
return
|
||||
}
|
||||
sch := res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
|
||||
vd.meta = sch
|
||||
vd.sch = sch
|
||||
}
|
||||
|
||||
// reference validation --
|
||||
|
||||
func (vd *validator) validateRef(sch *Schema, kw string) error {
|
||||
err := vd.validateSelf(sch, kw, false)
|
||||
if err != nil {
|
||||
refErr := vd.error(&kind.Reference{Keyword: kw, URL: sch.Location})
|
||||
verr := err.(*ValidationError)
|
||||
if _, ok := verr.ErrorKind.(*kind.Group); ok {
|
||||
refErr.Causes = verr.Causes
|
||||
} else {
|
||||
refErr.Causes = append(refErr.Causes, verr)
|
||||
}
|
||||
return refErr
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (vd *validator) resolveRecursiveAnchor(fallback *Schema) *Schema {
|
||||
sch := fallback
|
||||
scp := vd.scp
|
||||
for scp != nil {
|
||||
if scp.sch.resource.RecursiveAnchor {
|
||||
sch = scp.sch
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (vd *validator) resolveDynamicAnchor(name string, fallback *Schema) *Schema {
|
||||
sch := fallback
|
||||
scp := vd.scp
|
||||
for scp != nil {
|
||||
if dsch, ok := scp.sch.resource.dynamicAnchors[name]; ok {
|
||||
sch = dsch
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (vd *validator) validateRefs() {
|
||||
// $recursiveRef --
|
||||
if sch := vd.sch.RecursiveRef; sch != nil {
|
||||
if sch.RecursiveAnchor {
|
||||
sch = vd.resolveRecursiveAnchor(sch)
|
||||
}
|
||||
vd.addErr(vd.validateRef(sch, "$recursiveRef"))
|
||||
}
|
||||
|
||||
// $dynamicRef --
|
||||
if dref := vd.sch.DynamicRef; dref != nil {
|
||||
sch := dref.Ref // initial target
|
||||
if dref.Anchor != "" {
|
||||
// $dynamicRef includes anchor
|
||||
if sch.DynamicAnchor == dref.Anchor {
|
||||
// initial target has matching $dynamicAnchor
|
||||
sch = vd.resolveDynamicAnchor(dref.Anchor, sch)
|
||||
}
|
||||
}
|
||||
vd.addErr(vd.validateRef(sch, "$dynamicRef"))
|
||||
}
|
||||
}
|
||||
|
||||
// error helpers --
|
||||
|
||||
func (vd *validator) instanceLocation() []string {
|
||||
return slices.Clone(vd.vloc)
|
||||
}
|
||||
|
||||
func (vd *validator) error(kind ErrorKind) *ValidationError {
|
||||
if vd.boolResult {
|
||||
return &ValidationError{}
|
||||
}
|
||||
return &ValidationError{
|
||||
SchemaURL: vd.sch.Location,
|
||||
InstanceLocation: vd.instanceLocation(),
|
||||
ErrorKind: kind,
|
||||
Causes: nil,
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) addErr(err error) {
|
||||
if err != nil {
|
||||
vd.errors = append(vd.errors, err.(*ValidationError))
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) addError(kind ErrorKind) {
|
||||
vd.errors = append(vd.errors, vd.error(kind))
|
||||
}
|
||||
|
||||
func (vd *validator) addErrors(errors []*ValidationError, kind ErrorKind) {
|
||||
err := vd.error(kind)
|
||||
err.Causes = errors
|
||||
vd.errors = append(vd.errors, err)
|
||||
}
|
||||
|
||||
func (vd *validator) findMissing(obj map[string]any, reqd []string) []string {
|
||||
var missing []string
|
||||
for _, pname := range reqd {
|
||||
if _, ok := obj[pname]; !ok {
|
||||
if vd.boolResult {
|
||||
return []string{} // non-nil
|
||||
}
|
||||
missing = append(missing, pname)
|
||||
}
|
||||
}
|
||||
return missing
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type scope struct {
|
||||
sch *Schema
|
||||
|
||||
// if empty, compute from self.sch and self.parent.sch.
|
||||
// not empty, only when there is a jump i.e, $ref, $XXXRef
|
||||
refKeyword string
|
||||
|
||||
// unique id of value being validated
|
||||
// if two scopes validate same value, they will have
|
||||
// same vid
|
||||
vid int
|
||||
|
||||
parent *scope
|
||||
}
|
||||
|
||||
func (sc *scope) child(sch *Schema, refKeyword string, vid int) *scope {
|
||||
return &scope{sch, refKeyword, vid, sc}
|
||||
}
|
||||
|
||||
func (sc *scope) checkCycle() *scope {
|
||||
scp := sc.parent
|
||||
for scp != nil {
|
||||
if scp.vid != sc.vid {
|
||||
break
|
||||
}
|
||||
if scp.sch == sc.sch {
|
||||
return scp
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sc *scope) kwLoc() string {
|
||||
var loc string
|
||||
for sc.parent != nil {
|
||||
if sc.refKeyword != "" {
|
||||
loc = fmt.Sprintf("/%s%s", escape(sc.refKeyword), loc)
|
||||
} else {
|
||||
cur := sc.sch.Location
|
||||
parent := sc.parent.sch.Location
|
||||
loc = fmt.Sprintf("%s%s", cur[len(parent):], loc)
|
||||
}
|
||||
sc = sc.parent
|
||||
}
|
||||
return loc
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type uneval struct {
|
||||
props map[string]struct{}
|
||||
items map[int]struct{}
|
||||
}
|
||||
|
||||
func unevalFrom(v any, sch *Schema, callerNeeds bool) *uneval {
|
||||
uneval := &uneval{}
|
||||
switch v := v.(type) {
|
||||
case map[string]any:
|
||||
if !sch.allPropsEvaluated && (callerNeeds || sch.UnevaluatedProperties != nil) {
|
||||
uneval.props = map[string]struct{}{}
|
||||
for k := range v {
|
||||
uneval.props[k] = struct{}{}
|
||||
}
|
||||
}
|
||||
case []any:
|
||||
if !sch.allItemsEvaluated && (callerNeeds || sch.UnevaluatedItems != nil) && sch.numItemsEvaluated < len(v) {
|
||||
uneval.items = map[int]struct{}{}
|
||||
for i := sch.numItemsEvaluated; i < len(v); i++ {
|
||||
uneval.items[i] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
return uneval
|
||||
}
|
||||
|
||||
func (ue *uneval) merge(other *uneval) {
|
||||
for k := range ue.props {
|
||||
if _, ok := other.props[k]; !ok {
|
||||
delete(ue.props, k)
|
||||
}
|
||||
}
|
||||
for i := range ue.items {
|
||||
if _, ok := other.items[i]; !ok {
|
||||
delete(ue.items, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (ue *uneval) isEmpty() bool {
|
||||
return len(ue.props) == 0 && len(ue.items) == 0
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ValidationError struct {
|
||||
// absolute, dereferenced schema location.
|
||||
SchemaURL string
|
||||
|
||||
// location of the JSON value within the instance being validated.
|
||||
InstanceLocation []string
|
||||
|
||||
// kind of error
|
||||
ErrorKind ErrorKind
|
||||
|
||||
// holds nested errors
|
||||
Causes []*ValidationError
|
||||
}
|
||||
|
||||
type ErrorKind interface {
|
||||
KeywordPath() []string
|
||||
LocalizedString(*message.Printer) string
|
||||
}
|
||||
106
vendor/github.com/santhosh-tekuri/jsonschema/v6/vocab.go
generated
vendored
Normal file
106
vendor/github.com/santhosh-tekuri/jsonschema/v6/vocab.go
generated
vendored
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
package jsonschema
|
||||
|
||||
// CompilerContext provides helpers for
|
||||
// compiling a [Vocabulary].
|
||||
type CompilerContext struct {
|
||||
c *objCompiler
|
||||
}
|
||||
|
||||
func (ctx *CompilerContext) Enqueue(schPath []string) *Schema {
|
||||
ptr := ctx.c.up.ptr
|
||||
for _, tok := range schPath {
|
||||
ptr = ptr.append(tok)
|
||||
}
|
||||
return ctx.c.enqueuePtr(ptr)
|
||||
}
|
||||
|
||||
// Vocabulary defines a set of keywords, their syntax and
|
||||
// their semantics.
|
||||
type Vocabulary struct {
|
||||
// URL identifier for this Vocabulary.
|
||||
URL string
|
||||
|
||||
// Schema that is used to validate the keywords that is introduced by this
|
||||
// vocabulary.
|
||||
Schema *Schema
|
||||
|
||||
// Subschemas lists the possible locations of subschemas introduced by
|
||||
// this vocabulary.
|
||||
Subschemas []SchemaPath
|
||||
|
||||
// Compile compiles the keywords(introduced by this vocabulary) in obj into [SchemaExt].
|
||||
// If obj does not contain any keywords introduced by this vocabulary, nil SchemaExt must
|
||||
// be returned.
|
||||
Compile func(ctx *CompilerContext, obj map[string]any) (SchemaExt, error)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemaExt is compled form of vocabulary.
|
||||
type SchemaExt interface {
|
||||
// Validate validates v against and errors if any are reported
|
||||
// to ctx.
|
||||
Validate(ctx *ValidatorContext, v any)
|
||||
}
|
||||
|
||||
// ValidatorContext provides helpers for
|
||||
// validating with [SchemaExt].
|
||||
type ValidatorContext struct {
|
||||
vd *validator
|
||||
}
|
||||
|
||||
// Validate validates v with sch. vpath gives path of v from current context value.
|
||||
func (ctx *ValidatorContext) Validate(sch *Schema, v any, vpath []string) error {
|
||||
switch len(vpath) {
|
||||
case 0:
|
||||
return ctx.vd.validateSelf(sch, "", false)
|
||||
case 1:
|
||||
return ctx.vd.validateVal(sch, v, vpath[0])
|
||||
default:
|
||||
return ctx.vd.validateValue(sch, v, vpath)
|
||||
}
|
||||
}
|
||||
|
||||
// EvaluatedProp marks given property of current object as evaluated.
|
||||
func (ctx *ValidatorContext) EvaluatedProp(pname string) {
|
||||
delete(ctx.vd.uneval.props, pname)
|
||||
}
|
||||
|
||||
// EvaluatedItem marks items at given index of current array as evaluated.
|
||||
func (ctx *ValidatorContext) EvaluatedItem(index int) {
|
||||
delete(ctx.vd.uneval.items, index)
|
||||
}
|
||||
|
||||
// AddError reports validation-error of given kind.
|
||||
func (ctx *ValidatorContext) AddError(k ErrorKind) {
|
||||
ctx.vd.addError(k)
|
||||
}
|
||||
|
||||
// AddErrors reports validation-errors of given kind.
|
||||
func (ctx *ValidatorContext) AddErrors(errors []*ValidationError, k ErrorKind) {
|
||||
ctx.vd.addErrors(errors, k)
|
||||
}
|
||||
|
||||
// AddErr reports the given err. This is typically used to report
|
||||
// the error created by subschema validation.
|
||||
//
|
||||
// NOTE that err must be of type *ValidationError.
|
||||
func (ctx *ValidatorContext) AddErr(err error) {
|
||||
ctx.vd.addErr(err)
|
||||
}
|
||||
|
||||
func (ctx *ValidatorContext) Equals(v1, v2 any) (bool, error) {
|
||||
b, k := equals(v1, v2)
|
||||
if k != nil {
|
||||
return false, ctx.vd.error(k)
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
|
||||
func (ctx *ValidatorContext) Duplicates(arr []any) (int, int, error) {
|
||||
i, j, k := duplicates(arr)
|
||||
if k != nil {
|
||||
return -1, -1, ctx.vd.error(k)
|
||||
}
|
||||
return i, j, nil
|
||||
}
|
||||
27
vendor/golang.org/x/text/LICENSE
generated
vendored
Normal file
27
vendor/golang.org/x/text/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
Copyright 2009 The Go Authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
22
vendor/golang.org/x/text/PATENTS
generated
vendored
Normal file
22
vendor/golang.org/x/text/PATENTS
generated
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
Additional IP Rights Grant (Patents)
|
||||
|
||||
"This implementation" means the copyrightable works distributed by
|
||||
Google as part of the Go project.
|
||||
|
||||
Google hereby grants to You a perpetual, worldwide, non-exclusive,
|
||||
no-charge, royalty-free, irrevocable (except as stated in this section)
|
||||
patent license to make, have made, use, offer to sell, sell, import,
|
||||
transfer and otherwise run, modify and propagate the contents of this
|
||||
implementation of Go, where such license applies only to those patent
|
||||
claims, both currently owned or controlled by Google and acquired in
|
||||
the future, licensable by Google that are necessarily infringed by this
|
||||
implementation of Go. This grant does not include claims that would be
|
||||
infringed only as a consequence of further modification of this
|
||||
implementation. If you or your agent or exclusive licensee institute or
|
||||
order or agree to the institution of patent litigation against any
|
||||
entity (including a cross-claim or counterclaim in a lawsuit) alleging
|
||||
that this implementation of Go or any code incorporated within this
|
||||
implementation of Go constitutes direct or contributory patent
|
||||
infringement, or inducement of patent infringement, then any patent
|
||||
rights granted to you under this License for this implementation of Go
|
||||
shall terminate as of the date such litigation is filed.
|
||||
70
vendor/golang.org/x/text/feature/plural/common.go
generated
vendored
Normal file
70
vendor/golang.org/x/text/feature/plural/common.go
generated
vendored
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
|
||||
|
||||
package plural
|
||||
|
||||
// Form defines a plural form.
|
||||
//
|
||||
// Not all languages support all forms. Also, the meaning of each form varies
|
||||
// per language. It is important to note that the name of a form does not
|
||||
// necessarily correspond one-to-one with the set of numbers. For instance,
|
||||
// for Croation, One matches not only 1, but also 11, 21, etc.
|
||||
//
|
||||
// Each language must at least support the form "other".
|
||||
type Form byte
|
||||
|
||||
const (
|
||||
Other Form = iota
|
||||
Zero
|
||||
One
|
||||
Two
|
||||
Few
|
||||
Many
|
||||
)
|
||||
|
||||
var countMap = map[string]Form{
|
||||
"other": Other,
|
||||
"zero": Zero,
|
||||
"one": One,
|
||||
"two": Two,
|
||||
"few": Few,
|
||||
"many": Many,
|
||||
}
|
||||
|
||||
type pluralCheck struct {
|
||||
// category:
|
||||
// 3..7: opID
|
||||
// 0..2: category
|
||||
cat byte
|
||||
setID byte
|
||||
}
|
||||
|
||||
// opID identifies the type of operand in the plural rule, being i, n or f.
|
||||
// (v, w, and t are treated as filters in our implementation.)
|
||||
type opID byte
|
||||
|
||||
const (
|
||||
opMod opID = 0x1 // is '%' used?
|
||||
opNotEqual opID = 0x2 // using "!=" to compare
|
||||
opI opID = 0 << 2 // integers after taking the absolute value
|
||||
opN opID = 1 << 2 // full number (must be integer)
|
||||
opF opID = 2 << 2 // fraction
|
||||
opV opID = 3 << 2 // number of visible digits
|
||||
opW opID = 4 << 2 // number of visible digits without trailing zeros
|
||||
opBretonM opID = 5 << 2 // hard-wired rule for Breton
|
||||
opItalian800 opID = 6 << 2 // hard-wired rule for Italian
|
||||
opAzerbaijan00s opID = 7 << 2 // hard-wired rule for Azerbaijan
|
||||
)
|
||||
const (
|
||||
// Use this plural form to indicate the next rule needs to match as well.
|
||||
// The last condition in the list will have the correct plural form.
|
||||
andNext = 0x7
|
||||
formMask = 0x7
|
||||
|
||||
opShift = 3
|
||||
|
||||
// numN indicates the maximum integer, or maximum mod value, for which we
|
||||
// have inclusion masks.
|
||||
numN = 100
|
||||
// The common denominator of the modulo that is taken.
|
||||
maxMod = 100
|
||||
)
|
||||
244
vendor/golang.org/x/text/feature/plural/message.go
generated
vendored
Normal file
244
vendor/golang.org/x/text/feature/plural/message.go
generated
vendored
Normal file
|
|
@ -0,0 +1,244 @@
|
|||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package plural
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"strconv"
|
||||
|
||||
"golang.org/x/text/internal/catmsg"
|
||||
"golang.org/x/text/internal/number"
|
||||
"golang.org/x/text/language"
|
||||
"golang.org/x/text/message/catalog"
|
||||
)
|
||||
|
||||
// TODO: consider deleting this interface. Maybe VisibleDigits is always
|
||||
// sufficient and practical.
|
||||
|
||||
// Interface is used for types that can determine their own plural form.
|
||||
type Interface interface {
|
||||
// PluralForm reports the plural form for the given language of the
|
||||
// underlying value. It also returns the integer value. If the integer value
|
||||
// is larger than fits in n, PluralForm may return a value modulo
|
||||
// 10,000,000.
|
||||
PluralForm(t language.Tag, scale int) (f Form, n int)
|
||||
}
|
||||
|
||||
// Selectf returns the first case for which its selector is a match for the
|
||||
// arg-th substitution argument to a formatting call, formatting it as indicated
|
||||
// by format.
|
||||
//
|
||||
// The cases argument are pairs of selectors and messages. Selectors are of type
|
||||
// string or Form. Messages are of type string or catalog.Message. A selector
|
||||
// matches an argument if:
|
||||
// - it is "other" or Other
|
||||
// - it matches the plural form of the argument: "zero", "one", "two", "few",
|
||||
// or "many", or the equivalent Form
|
||||
// - it is of the form "=x" where x is an integer that matches the value of
|
||||
// the argument.
|
||||
// - it is of the form "<x" where x is an integer that is larger than the
|
||||
// argument.
|
||||
//
|
||||
// The format argument determines the formatting parameters for which to
|
||||
// determine the plural form. This is especially relevant for non-integer
|
||||
// values.
|
||||
//
|
||||
// The format string may be "", in which case a best-effort attempt is made to
|
||||
// find a reasonable representation on which to base the plural form. Examples
|
||||
// of format strings are:
|
||||
// - %.2f decimal with scale 2
|
||||
// - %.2e scientific notation with precision 3 (scale + 1)
|
||||
// - %d integer
|
||||
func Selectf(arg int, format string, cases ...interface{}) catalog.Message {
|
||||
var p parser
|
||||
// Intercept the formatting parameters of format by doing a dummy print.
|
||||
fmt.Fprintf(io.Discard, format, &p)
|
||||
m := &message{arg, kindDefault, 0, cases}
|
||||
switch p.verb {
|
||||
case 'g':
|
||||
m.kind = kindPrecision
|
||||
m.scale = p.scale
|
||||
case 'f':
|
||||
m.kind = kindScale
|
||||
m.scale = p.scale
|
||||
case 'e':
|
||||
m.kind = kindScientific
|
||||
m.scale = p.scale
|
||||
case 'd':
|
||||
m.kind = kindScale
|
||||
m.scale = 0
|
||||
default:
|
||||
// TODO: do we need to handle errors?
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
type parser struct {
|
||||
verb rune
|
||||
scale int
|
||||
}
|
||||
|
||||
func (p *parser) Format(s fmt.State, verb rune) {
|
||||
p.verb = verb
|
||||
p.scale = -1
|
||||
if prec, ok := s.Precision(); ok {
|
||||
p.scale = prec
|
||||
}
|
||||
}
|
||||
|
||||
type message struct {
|
||||
arg int
|
||||
kind int
|
||||
scale int
|
||||
cases []interface{}
|
||||
}
|
||||
|
||||
const (
|
||||
// Start with non-ASCII to allow skipping values.
|
||||
kindDefault = 0x80 + iota
|
||||
kindScale // verb f, number of fraction digits follows
|
||||
kindScientific // verb e, number of fraction digits follows
|
||||
kindPrecision // verb g, number of significant digits follows
|
||||
)
|
||||
|
||||
var handle = catmsg.Register("golang.org/x/text/feature/plural:plural", execute)
|
||||
|
||||
func (m *message) Compile(e *catmsg.Encoder) error {
|
||||
e.EncodeMessageType(handle)
|
||||
|
||||
e.EncodeUint(uint64(m.arg))
|
||||
|
||||
e.EncodeUint(uint64(m.kind))
|
||||
if m.kind > kindDefault {
|
||||
e.EncodeUint(uint64(m.scale))
|
||||
}
|
||||
|
||||
forms := validForms(cardinal, e.Language())
|
||||
|
||||
for i := 0; i < len(m.cases); {
|
||||
if err := compileSelector(e, forms, m.cases[i]); err != nil {
|
||||
return err
|
||||
}
|
||||
if i++; i >= len(m.cases) {
|
||||
return fmt.Errorf("plural: no message defined for selector %v", m.cases[i-1])
|
||||
}
|
||||
var msg catalog.Message
|
||||
switch x := m.cases[i].(type) {
|
||||
case string:
|
||||
msg = catalog.String(x)
|
||||
case catalog.Message:
|
||||
msg = x
|
||||
default:
|
||||
return fmt.Errorf("plural: message of type %T; must be string or catalog.Message", x)
|
||||
}
|
||||
if err := e.EncodeMessage(msg); err != nil {
|
||||
return err
|
||||
}
|
||||
i++
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func compileSelector(e *catmsg.Encoder, valid []Form, selector interface{}) error {
|
||||
form := Other
|
||||
switch x := selector.(type) {
|
||||
case string:
|
||||
if x == "" {
|
||||
return fmt.Errorf("plural: empty selector")
|
||||
}
|
||||
if c := x[0]; c == '=' || c == '<' {
|
||||
val, err := strconv.ParseUint(x[1:], 10, 16)
|
||||
if err != nil {
|
||||
return fmt.Errorf("plural: invalid number in selector %q: %v", selector, err)
|
||||
}
|
||||
e.EncodeUint(uint64(c))
|
||||
e.EncodeUint(val)
|
||||
return nil
|
||||
}
|
||||
var ok bool
|
||||
form, ok = countMap[x]
|
||||
if !ok {
|
||||
return fmt.Errorf("plural: invalid plural form %q", selector)
|
||||
}
|
||||
case Form:
|
||||
form = x
|
||||
default:
|
||||
return fmt.Errorf("plural: selector of type %T; want string or Form", selector)
|
||||
}
|
||||
|
||||
ok := false
|
||||
for _, f := range valid {
|
||||
if f == form {
|
||||
ok = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !ok {
|
||||
return fmt.Errorf("plural: form %q not supported for language %q", selector, e.Language())
|
||||
}
|
||||
e.EncodeUint(uint64(form))
|
||||
return nil
|
||||
}
|
||||
|
||||
func execute(d *catmsg.Decoder) bool {
|
||||
lang := d.Language()
|
||||
argN := int(d.DecodeUint())
|
||||
kind := int(d.DecodeUint())
|
||||
scale := -1 // default
|
||||
if kind > kindDefault {
|
||||
scale = int(d.DecodeUint())
|
||||
}
|
||||
form := Other
|
||||
n := -1
|
||||
if arg := d.Arg(argN); arg == nil {
|
||||
// Default to Other.
|
||||
} else if x, ok := arg.(number.VisibleDigits); ok {
|
||||
d := x.Digits(nil, lang, scale)
|
||||
form, n = cardinal.matchDisplayDigits(lang, &d)
|
||||
} else if x, ok := arg.(Interface); ok {
|
||||
// This covers lists and formatters from the number package.
|
||||
form, n = x.PluralForm(lang, scale)
|
||||
} else {
|
||||
var f number.Formatter
|
||||
switch kind {
|
||||
case kindScale:
|
||||
f.InitDecimal(lang)
|
||||
f.SetScale(scale)
|
||||
case kindScientific:
|
||||
f.InitScientific(lang)
|
||||
f.SetScale(scale)
|
||||
case kindPrecision:
|
||||
f.InitDecimal(lang)
|
||||
f.SetPrecision(scale)
|
||||
case kindDefault:
|
||||
// sensible default
|
||||
f.InitDecimal(lang)
|
||||
if k := reflect.TypeOf(arg).Kind(); reflect.Int <= k && k <= reflect.Uintptr {
|
||||
f.SetScale(0)
|
||||
} else {
|
||||
f.SetScale(2)
|
||||
}
|
||||
}
|
||||
var dec number.Decimal // TODO: buffer in Printer
|
||||
dec.Convert(f.RoundingContext, arg)
|
||||
v := number.FormatDigits(&dec, f.RoundingContext)
|
||||
if !v.NaN && !v.Inf {
|
||||
form, n = cardinal.matchDisplayDigits(d.Language(), &v)
|
||||
}
|
||||
}
|
||||
for !d.Done() {
|
||||
f := d.DecodeUint()
|
||||
if (f == '=' && n == int(d.DecodeUint())) ||
|
||||
(f == '<' && 0 <= n && n < int(d.DecodeUint())) ||
|
||||
form == Form(f) ||
|
||||
Other == Form(f) {
|
||||
return d.ExecuteMessage()
|
||||
}
|
||||
d.SkipMessage()
|
||||
}
|
||||
return false
|
||||
}
|
||||
262
vendor/golang.org/x/text/feature/plural/plural.go
generated
vendored
Normal file
262
vendor/golang.org/x/text/feature/plural/plural.go
generated
vendored
Normal file
|
|
@ -0,0 +1,262 @@
|
|||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:generate go run gen.go gen_common.go
|
||||
|
||||
// Package plural provides utilities for handling linguistic plurals in text.
|
||||
//
|
||||
// The definitions in this package are based on the plural rule handling defined
|
||||
// in CLDR. See
|
||||
// https://unicode.org/reports/tr35/tr35-numbers.html#Language_Plural_Rules for
|
||||
// details.
|
||||
package plural
|
||||
|
||||
import (
|
||||
"golang.org/x/text/internal/language/compact"
|
||||
"golang.org/x/text/internal/number"
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
||||
// Rules defines the plural rules for all languages for a certain plural type.
|
||||
//
|
||||
// This package is UNDER CONSTRUCTION and its API may change.
|
||||
type Rules struct {
|
||||
rules []pluralCheck
|
||||
index []byte
|
||||
langToIndex []byte
|
||||
inclusionMasks []uint64
|
||||
}
|
||||
|
||||
var (
|
||||
// Cardinal defines the plural rules for numbers indicating quantities.
|
||||
Cardinal *Rules = cardinal
|
||||
|
||||
// Ordinal defines the plural rules for numbers indicating position
|
||||
// (first, second, etc.).
|
||||
Ordinal *Rules = ordinal
|
||||
|
||||
ordinal = &Rules{
|
||||
ordinalRules,
|
||||
ordinalIndex,
|
||||
ordinalLangToIndex,
|
||||
ordinalInclusionMasks[:],
|
||||
}
|
||||
|
||||
cardinal = &Rules{
|
||||
cardinalRules,
|
||||
cardinalIndex,
|
||||
cardinalLangToIndex,
|
||||
cardinalInclusionMasks[:],
|
||||
}
|
||||
)
|
||||
|
||||
// getIntApprox converts the digits in slice digits[start:end] to an integer
|
||||
// according to the following rules:
|
||||
// - Let i be asInt(digits[start:end]), where out-of-range digits are assumed
|
||||
// to be zero.
|
||||
// - Result n is big if i / 10^nMod > 1.
|
||||
// - Otherwise the result is i % 10^nMod.
|
||||
//
|
||||
// For example, if digits is {1, 2, 3} and start:end is 0:5, then the result
|
||||
// for various values of nMod is:
|
||||
// - when nMod == 2, n == big
|
||||
// - when nMod == 3, n == big
|
||||
// - when nMod == 4, n == big
|
||||
// - when nMod == 5, n == 12300
|
||||
// - when nMod == 6, n == 12300
|
||||
// - when nMod == 7, n == 12300
|
||||
func getIntApprox(digits []byte, start, end, nMod, big int) (n int) {
|
||||
// Leading 0 digits just result in 0.
|
||||
p := start
|
||||
if p < 0 {
|
||||
p = 0
|
||||
}
|
||||
// Range only over the part for which we have digits.
|
||||
mid := end
|
||||
if mid >= len(digits) {
|
||||
mid = len(digits)
|
||||
}
|
||||
// Check digits more significant that nMod.
|
||||
if q := end - nMod; q > 0 {
|
||||
if q > mid {
|
||||
q = mid
|
||||
}
|
||||
for ; p < q; p++ {
|
||||
if digits[p] != 0 {
|
||||
return big
|
||||
}
|
||||
}
|
||||
}
|
||||
for ; p < mid; p++ {
|
||||
n = 10*n + int(digits[p])
|
||||
}
|
||||
// Multiply for trailing zeros.
|
||||
for ; p < end; p++ {
|
||||
n *= 10
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// MatchDigits computes the plural form for the given language and the given
|
||||
// decimal floating point digits. The digits are stored in big-endian order and
|
||||
// are of value byte(0) - byte(9). The floating point position is indicated by
|
||||
// exp and the number of visible decimals is scale. All leading and trailing
|
||||
// zeros may be omitted from digits.
|
||||
//
|
||||
// The following table contains examples of possible arguments to represent
|
||||
// the given numbers.
|
||||
//
|
||||
// decimal digits exp scale
|
||||
// 123 []byte{1, 2, 3} 3 0
|
||||
// 123.4 []byte{1, 2, 3, 4} 3 1
|
||||
// 123.40 []byte{1, 2, 3, 4} 3 2
|
||||
// 100000 []byte{1} 6 0
|
||||
// 100000.00 []byte{1} 6 3
|
||||
func (p *Rules) MatchDigits(t language.Tag, digits []byte, exp, scale int) Form {
|
||||
index := tagToID(t)
|
||||
|
||||
// Differentiate up to including mod 1000000 for the integer part.
|
||||
n := getIntApprox(digits, 0, exp, 6, 1000000)
|
||||
|
||||
// Differentiate up to including mod 100 for the fractional part.
|
||||
f := getIntApprox(digits, exp, exp+scale, 2, 100)
|
||||
|
||||
return matchPlural(p, index, n, f, scale)
|
||||
}
|
||||
|
||||
func (p *Rules) matchDisplayDigits(t language.Tag, d *number.Digits) (Form, int) {
|
||||
n := getIntApprox(d.Digits, 0, int(d.Exp), 6, 1000000)
|
||||
return p.MatchDigits(t, d.Digits, int(d.Exp), d.NumFracDigits()), n
|
||||
}
|
||||
|
||||
func validForms(p *Rules, t language.Tag) (forms []Form) {
|
||||
offset := p.langToIndex[tagToID(t)]
|
||||
rules := p.rules[p.index[offset]:p.index[offset+1]]
|
||||
|
||||
forms = append(forms, Other)
|
||||
last := Other
|
||||
for _, r := range rules {
|
||||
if cat := Form(r.cat & formMask); cat != andNext && last != cat {
|
||||
forms = append(forms, cat)
|
||||
last = cat
|
||||
}
|
||||
}
|
||||
return forms
|
||||
}
|
||||
|
||||
func (p *Rules) matchComponents(t language.Tag, n, f, scale int) Form {
|
||||
return matchPlural(p, tagToID(t), n, f, scale)
|
||||
}
|
||||
|
||||
// MatchPlural returns the plural form for the given language and plural
|
||||
// operands (as defined in
|
||||
// https://unicode.org/reports/tr35/tr35-numbers.html#Language_Plural_Rules):
|
||||
//
|
||||
// where
|
||||
// n absolute value of the source number (integer and decimals)
|
||||
// input
|
||||
// i integer digits of n.
|
||||
// v number of visible fraction digits in n, with trailing zeros.
|
||||
// w number of visible fraction digits in n, without trailing zeros.
|
||||
// f visible fractional digits in n, with trailing zeros (f = t * 10^(v-w))
|
||||
// t visible fractional digits in n, without trailing zeros.
|
||||
//
|
||||
// If any of the operand values is too large to fit in an int, it is okay to
|
||||
// pass the value modulo 10,000,000.
|
||||
func (p *Rules) MatchPlural(lang language.Tag, i, v, w, f, t int) Form {
|
||||
return matchPlural(p, tagToID(lang), i, f, v)
|
||||
}
|
||||
|
||||
func matchPlural(p *Rules, index compact.ID, n, f, v int) Form {
|
||||
nMask := p.inclusionMasks[n%maxMod]
|
||||
// Compute the fMask inline in the rules below, as it is relatively rare.
|
||||
// fMask := p.inclusionMasks[f%maxMod]
|
||||
vMask := p.inclusionMasks[v%maxMod]
|
||||
|
||||
// Do the matching
|
||||
offset := p.langToIndex[index]
|
||||
rules := p.rules[p.index[offset]:p.index[offset+1]]
|
||||
for i := 0; i < len(rules); i++ {
|
||||
rule := rules[i]
|
||||
setBit := uint64(1 << rule.setID)
|
||||
var skip bool
|
||||
switch op := opID(rule.cat >> opShift); op {
|
||||
case opI: // i = x
|
||||
skip = n >= numN || nMask&setBit == 0
|
||||
|
||||
case opI | opNotEqual: // i != x
|
||||
skip = n < numN && nMask&setBit != 0
|
||||
|
||||
case opI | opMod: // i % m = x
|
||||
skip = nMask&setBit == 0
|
||||
|
||||
case opI | opMod | opNotEqual: // i % m != x
|
||||
skip = nMask&setBit != 0
|
||||
|
||||
case opN: // n = x
|
||||
skip = f != 0 || n >= numN || nMask&setBit == 0
|
||||
|
||||
case opN | opNotEqual: // n != x
|
||||
skip = f == 0 && n < numN && nMask&setBit != 0
|
||||
|
||||
case opN | opMod: // n % m = x
|
||||
skip = f != 0 || nMask&setBit == 0
|
||||
|
||||
case opN | opMod | opNotEqual: // n % m != x
|
||||
skip = f == 0 && nMask&setBit != 0
|
||||
|
||||
case opF: // f = x
|
||||
skip = f >= numN || p.inclusionMasks[f%maxMod]&setBit == 0
|
||||
|
||||
case opF | opNotEqual: // f != x
|
||||
skip = f < numN && p.inclusionMasks[f%maxMod]&setBit != 0
|
||||
|
||||
case opF | opMod: // f % m = x
|
||||
skip = p.inclusionMasks[f%maxMod]&setBit == 0
|
||||
|
||||
case opF | opMod | opNotEqual: // f % m != x
|
||||
skip = p.inclusionMasks[f%maxMod]&setBit != 0
|
||||
|
||||
case opV: // v = x
|
||||
skip = v < numN && vMask&setBit == 0
|
||||
|
||||
case opV | opNotEqual: // v != x
|
||||
skip = v < numN && vMask&setBit != 0
|
||||
|
||||
case opW: // w == 0
|
||||
skip = f != 0
|
||||
|
||||
case opW | opNotEqual: // w != 0
|
||||
skip = f == 0
|
||||
|
||||
// Hard-wired rules that cannot be handled by our algorithm.
|
||||
|
||||
case opBretonM:
|
||||
skip = f != 0 || n == 0 || n%1000000 != 0
|
||||
|
||||
case opAzerbaijan00s:
|
||||
// 100,200,300,400,500,600,700,800,900
|
||||
skip = n == 0 || n >= 1000 || n%100 != 0
|
||||
|
||||
case opItalian800:
|
||||
skip = (f != 0 || n >= numN || nMask&setBit == 0) && n != 800
|
||||
}
|
||||
if skip {
|
||||
// advance over AND entries.
|
||||
for ; i < len(rules) && rules[i].cat&formMask == andNext; i++ {
|
||||
}
|
||||
continue
|
||||
}
|
||||
// return if we have a final entry.
|
||||
if cat := rule.cat & formMask; cat != andNext {
|
||||
return Form(cat)
|
||||
}
|
||||
}
|
||||
return Other
|
||||
}
|
||||
|
||||
func tagToID(t language.Tag) compact.ID {
|
||||
id, _ := compact.RegionalID(compact.Tag(t))
|
||||
return id
|
||||
}
|
||||
552
vendor/golang.org/x/text/feature/plural/tables.go
generated
vendored
Normal file
552
vendor/golang.org/x/text/feature/plural/tables.go
generated
vendored
Normal file
|
|
@ -0,0 +1,552 @@
|
|||
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
|
||||
|
||||
package plural
|
||||
|
||||
// CLDRVersion is the CLDR version from which the tables in this package are derived.
|
||||
const CLDRVersion = "32"
|
||||
|
||||
var ordinalRules = []pluralCheck{ // 64 elements
|
||||
0: {cat: 0x2f, setID: 0x4},
|
||||
1: {cat: 0x3a, setID: 0x5},
|
||||
2: {cat: 0x22, setID: 0x1},
|
||||
3: {cat: 0x22, setID: 0x6},
|
||||
4: {cat: 0x22, setID: 0x7},
|
||||
5: {cat: 0x2f, setID: 0x8},
|
||||
6: {cat: 0x3c, setID: 0x9},
|
||||
7: {cat: 0x2f, setID: 0xa},
|
||||
8: {cat: 0x3c, setID: 0xb},
|
||||
9: {cat: 0x2c, setID: 0xc},
|
||||
10: {cat: 0x24, setID: 0xd},
|
||||
11: {cat: 0x2d, setID: 0xe},
|
||||
12: {cat: 0x2d, setID: 0xf},
|
||||
13: {cat: 0x2f, setID: 0x10},
|
||||
14: {cat: 0x35, setID: 0x3},
|
||||
15: {cat: 0xc5, setID: 0x11},
|
||||
16: {cat: 0x2, setID: 0x1},
|
||||
17: {cat: 0x5, setID: 0x3},
|
||||
18: {cat: 0xd, setID: 0x12},
|
||||
19: {cat: 0x22, setID: 0x1},
|
||||
20: {cat: 0x2f, setID: 0x13},
|
||||
21: {cat: 0x3d, setID: 0x14},
|
||||
22: {cat: 0x2f, setID: 0x15},
|
||||
23: {cat: 0x3a, setID: 0x16},
|
||||
24: {cat: 0x2f, setID: 0x17},
|
||||
25: {cat: 0x3b, setID: 0x18},
|
||||
26: {cat: 0x2f, setID: 0xa},
|
||||
27: {cat: 0x3c, setID: 0xb},
|
||||
28: {cat: 0x22, setID: 0x1},
|
||||
29: {cat: 0x23, setID: 0x19},
|
||||
30: {cat: 0x24, setID: 0x1a},
|
||||
31: {cat: 0x22, setID: 0x1b},
|
||||
32: {cat: 0x23, setID: 0x2},
|
||||
33: {cat: 0x24, setID: 0x1a},
|
||||
34: {cat: 0xf, setID: 0x15},
|
||||
35: {cat: 0x1a, setID: 0x16},
|
||||
36: {cat: 0xf, setID: 0x17},
|
||||
37: {cat: 0x1b, setID: 0x18},
|
||||
38: {cat: 0xf, setID: 0x1c},
|
||||
39: {cat: 0x1d, setID: 0x1d},
|
||||
40: {cat: 0xa, setID: 0x1e},
|
||||
41: {cat: 0xa, setID: 0x1f},
|
||||
42: {cat: 0xc, setID: 0x20},
|
||||
43: {cat: 0xe4, setID: 0x0},
|
||||
44: {cat: 0x5, setID: 0x3},
|
||||
45: {cat: 0xd, setID: 0xe},
|
||||
46: {cat: 0xd, setID: 0x21},
|
||||
47: {cat: 0x22, setID: 0x1},
|
||||
48: {cat: 0x23, setID: 0x19},
|
||||
49: {cat: 0x24, setID: 0x1a},
|
||||
50: {cat: 0x25, setID: 0x22},
|
||||
51: {cat: 0x22, setID: 0x23},
|
||||
52: {cat: 0x23, setID: 0x19},
|
||||
53: {cat: 0x24, setID: 0x1a},
|
||||
54: {cat: 0x25, setID: 0x22},
|
||||
55: {cat: 0x22, setID: 0x24},
|
||||
56: {cat: 0x23, setID: 0x19},
|
||||
57: {cat: 0x24, setID: 0x1a},
|
||||
58: {cat: 0x25, setID: 0x22},
|
||||
59: {cat: 0x21, setID: 0x25},
|
||||
60: {cat: 0x22, setID: 0x1},
|
||||
61: {cat: 0x23, setID: 0x2},
|
||||
62: {cat: 0x24, setID: 0x26},
|
||||
63: {cat: 0x25, setID: 0x27},
|
||||
} // Size: 152 bytes
|
||||
|
||||
var ordinalIndex = []uint8{ // 22 elements
|
||||
0x00, 0x00, 0x02, 0x03, 0x04, 0x05, 0x07, 0x09,
|
||||
0x0b, 0x0f, 0x10, 0x13, 0x16, 0x1c, 0x1f, 0x22,
|
||||
0x28, 0x2f, 0x33, 0x37, 0x3b, 0x40,
|
||||
} // Size: 46 bytes
|
||||
|
||||
var ordinalLangToIndex = []uint8{ // 775 elements
|
||||
// Entry 0 - 3F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x12, 0x12, 0x00, 0x00, 0x00, 0x00, 0x10, 0x10,
|
||||
0x10, 0x10, 0x10, 0x00, 0x00, 0x05, 0x05, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 40 - 7F
|
||||
0x12, 0x12, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e,
|
||||
0x0e, 0x0e, 0x0e, 0x0e, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x14, 0x14, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 80 - BF
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
// Entry C0 - FF
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 100 - 13F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02,
|
||||
0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
// Entry 140 - 17F
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x11, 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11,
|
||||
0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x03,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 180 - 1BF
|
||||
0x00, 0x00, 0x00, 0x00, 0x09, 0x09, 0x09, 0x09,
|
||||
0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x0a, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x08, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 1C0 - 1FF
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x0f, 0x0f, 0x00, 0x00,
|
||||
0x00, 0x00, 0x02, 0x0d, 0x0d, 0x02, 0x02, 0x02,
|
||||
0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 200 - 23F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x04, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x13, 0x13, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 240 - 27F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 280 - 2BF
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x0b, 0x0b, 0x0b, 0x0b, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01,
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x07, 0x07, 0x02, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 2C0 - 2FF
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x06, 0x06, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 300 - 33F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x0c,
|
||||
} // Size: 799 bytes
|
||||
|
||||
var ordinalInclusionMasks = []uint64{ // 100 elements
|
||||
// Entry 0 - 1F
|
||||
0x0000002000010009, 0x00000018482000d3, 0x0000000042840195, 0x000000410a040581,
|
||||
0x00000041040c0081, 0x0000009840040041, 0x0000008400045001, 0x0000003850040001,
|
||||
0x0000003850060001, 0x0000003800049001, 0x0000000800052001, 0x0000000040660031,
|
||||
0x0000000041840331, 0x0000000100040f01, 0x00000001001c0001, 0x0000000040040001,
|
||||
0x0000000000045001, 0x0000000070040001, 0x0000000070040001, 0x0000000000049001,
|
||||
0x0000000080050001, 0x0000000040200011, 0x0000000040800111, 0x0000000100000501,
|
||||
0x0000000100080001, 0x0000000040000001, 0x0000000000005001, 0x0000000050000001,
|
||||
0x0000000050000001, 0x0000000000009001, 0x0000000000010001, 0x0000000040200011,
|
||||
// Entry 20 - 3F
|
||||
0x0000000040800111, 0x0000000100000501, 0x0000000100080001, 0x0000000040000001,
|
||||
0x0000000000005001, 0x0000000050000001, 0x0000000050000001, 0x0000000000009001,
|
||||
0x0000000200050001, 0x0000000040200011, 0x0000000040800111, 0x0000000100000501,
|
||||
0x0000000100080001, 0x0000000040000001, 0x0000000000005001, 0x0000000050000001,
|
||||
0x0000000050000001, 0x0000000000009001, 0x0000000080010001, 0x0000000040200011,
|
||||
0x0000000040800111, 0x0000000100000501, 0x0000000100080001, 0x0000000040000001,
|
||||
0x0000000000005001, 0x0000000050000001, 0x0000000050000001, 0x0000000000009001,
|
||||
0x0000000200050001, 0x0000000040200011, 0x0000000040800111, 0x0000000100000501,
|
||||
// Entry 40 - 5F
|
||||
0x0000000100080001, 0x0000000040000001, 0x0000000000005001, 0x0000000050000001,
|
||||
0x0000000050000001, 0x0000000000009001, 0x0000000080010001, 0x0000000040200011,
|
||||
0x0000000040800111, 0x0000000100000501, 0x0000000100080001, 0x0000000040000001,
|
||||
0x0000000000005001, 0x0000000050000001, 0x0000000050000001, 0x0000000000009001,
|
||||
0x0000000080070001, 0x0000000040200011, 0x0000000040800111, 0x0000000100000501,
|
||||
0x0000000100080001, 0x0000000040000001, 0x0000000000005001, 0x0000000050000001,
|
||||
0x0000000050000001, 0x0000000000009001, 0x0000000200010001, 0x0000000040200011,
|
||||
0x0000000040800111, 0x0000000100000501, 0x0000000100080001, 0x0000000040000001,
|
||||
// Entry 60 - 7F
|
||||
0x0000000000005001, 0x0000000050000001, 0x0000000050000001, 0x0000000000009001,
|
||||
} // Size: 824 bytes
|
||||
|
||||
// Slots used for ordinal: 40 of 0xFF rules; 16 of 0xFF indexes; 40 of 64 sets
|
||||
|
||||
var cardinalRules = []pluralCheck{ // 166 elements
|
||||
0: {cat: 0x2, setID: 0x3},
|
||||
1: {cat: 0x22, setID: 0x1},
|
||||
2: {cat: 0x2, setID: 0x4},
|
||||
3: {cat: 0x2, setID: 0x4},
|
||||
4: {cat: 0x7, setID: 0x1},
|
||||
5: {cat: 0x62, setID: 0x3},
|
||||
6: {cat: 0x22, setID: 0x4},
|
||||
7: {cat: 0x7, setID: 0x3},
|
||||
8: {cat: 0x42, setID: 0x1},
|
||||
9: {cat: 0x22, setID: 0x4},
|
||||
10: {cat: 0x22, setID: 0x4},
|
||||
11: {cat: 0x22, setID: 0x5},
|
||||
12: {cat: 0x22, setID: 0x1},
|
||||
13: {cat: 0x22, setID: 0x1},
|
||||
14: {cat: 0x7, setID: 0x4},
|
||||
15: {cat: 0x92, setID: 0x3},
|
||||
16: {cat: 0xf, setID: 0x6},
|
||||
17: {cat: 0x1f, setID: 0x7},
|
||||
18: {cat: 0x82, setID: 0x3},
|
||||
19: {cat: 0x92, setID: 0x3},
|
||||
20: {cat: 0xf, setID: 0x6},
|
||||
21: {cat: 0x62, setID: 0x3},
|
||||
22: {cat: 0x4a, setID: 0x6},
|
||||
23: {cat: 0x7, setID: 0x8},
|
||||
24: {cat: 0x62, setID: 0x3},
|
||||
25: {cat: 0x1f, setID: 0x9},
|
||||
26: {cat: 0x62, setID: 0x3},
|
||||
27: {cat: 0x5f, setID: 0x9},
|
||||
28: {cat: 0x72, setID: 0x3},
|
||||
29: {cat: 0x29, setID: 0xa},
|
||||
30: {cat: 0x29, setID: 0xb},
|
||||
31: {cat: 0x4f, setID: 0xb},
|
||||
32: {cat: 0x61, setID: 0x2},
|
||||
33: {cat: 0x2f, setID: 0x6},
|
||||
34: {cat: 0x3a, setID: 0x7},
|
||||
35: {cat: 0x4f, setID: 0x6},
|
||||
36: {cat: 0x5f, setID: 0x7},
|
||||
37: {cat: 0x62, setID: 0x2},
|
||||
38: {cat: 0x4f, setID: 0x6},
|
||||
39: {cat: 0x72, setID: 0x2},
|
||||
40: {cat: 0x21, setID: 0x3},
|
||||
41: {cat: 0x7, setID: 0x4},
|
||||
42: {cat: 0x32, setID: 0x3},
|
||||
43: {cat: 0x21, setID: 0x3},
|
||||
44: {cat: 0x22, setID: 0x1},
|
||||
45: {cat: 0x22, setID: 0x1},
|
||||
46: {cat: 0x23, setID: 0x2},
|
||||
47: {cat: 0x2, setID: 0x3},
|
||||
48: {cat: 0x22, setID: 0x1},
|
||||
49: {cat: 0x24, setID: 0xc},
|
||||
50: {cat: 0x7, setID: 0x1},
|
||||
51: {cat: 0x62, setID: 0x3},
|
||||
52: {cat: 0x74, setID: 0x3},
|
||||
53: {cat: 0x24, setID: 0x3},
|
||||
54: {cat: 0x2f, setID: 0xd},
|
||||
55: {cat: 0x34, setID: 0x1},
|
||||
56: {cat: 0xf, setID: 0x6},
|
||||
57: {cat: 0x1f, setID: 0x7},
|
||||
58: {cat: 0x62, setID: 0x3},
|
||||
59: {cat: 0x4f, setID: 0x6},
|
||||
60: {cat: 0x5a, setID: 0x7},
|
||||
61: {cat: 0xf, setID: 0xe},
|
||||
62: {cat: 0x1f, setID: 0xf},
|
||||
63: {cat: 0x64, setID: 0x3},
|
||||
64: {cat: 0x4f, setID: 0xe},
|
||||
65: {cat: 0x5c, setID: 0xf},
|
||||
66: {cat: 0x22, setID: 0x10},
|
||||
67: {cat: 0x23, setID: 0x11},
|
||||
68: {cat: 0x24, setID: 0x12},
|
||||
69: {cat: 0xf, setID: 0x1},
|
||||
70: {cat: 0x62, setID: 0x3},
|
||||
71: {cat: 0xf, setID: 0x2},
|
||||
72: {cat: 0x63, setID: 0x3},
|
||||
73: {cat: 0xf, setID: 0x13},
|
||||
74: {cat: 0x64, setID: 0x3},
|
||||
75: {cat: 0x74, setID: 0x3},
|
||||
76: {cat: 0xf, setID: 0x1},
|
||||
77: {cat: 0x62, setID: 0x3},
|
||||
78: {cat: 0x4a, setID: 0x1},
|
||||
79: {cat: 0xf, setID: 0x2},
|
||||
80: {cat: 0x63, setID: 0x3},
|
||||
81: {cat: 0x4b, setID: 0x2},
|
||||
82: {cat: 0xf, setID: 0x13},
|
||||
83: {cat: 0x64, setID: 0x3},
|
||||
84: {cat: 0x4c, setID: 0x13},
|
||||
85: {cat: 0x7, setID: 0x1},
|
||||
86: {cat: 0x62, setID: 0x3},
|
||||
87: {cat: 0x7, setID: 0x2},
|
||||
88: {cat: 0x63, setID: 0x3},
|
||||
89: {cat: 0x2f, setID: 0xa},
|
||||
90: {cat: 0x37, setID: 0x14},
|
||||
91: {cat: 0x65, setID: 0x3},
|
||||
92: {cat: 0x7, setID: 0x1},
|
||||
93: {cat: 0x62, setID: 0x3},
|
||||
94: {cat: 0x7, setID: 0x15},
|
||||
95: {cat: 0x64, setID: 0x3},
|
||||
96: {cat: 0x75, setID: 0x3},
|
||||
97: {cat: 0x7, setID: 0x1},
|
||||
98: {cat: 0x62, setID: 0x3},
|
||||
99: {cat: 0xf, setID: 0xe},
|
||||
100: {cat: 0x1f, setID: 0xf},
|
||||
101: {cat: 0x64, setID: 0x3},
|
||||
102: {cat: 0xf, setID: 0x16},
|
||||
103: {cat: 0x17, setID: 0x1},
|
||||
104: {cat: 0x65, setID: 0x3},
|
||||
105: {cat: 0xf, setID: 0x17},
|
||||
106: {cat: 0x65, setID: 0x3},
|
||||
107: {cat: 0xf, setID: 0xf},
|
||||
108: {cat: 0x65, setID: 0x3},
|
||||
109: {cat: 0x2f, setID: 0x6},
|
||||
110: {cat: 0x3a, setID: 0x7},
|
||||
111: {cat: 0x2f, setID: 0xe},
|
||||
112: {cat: 0x3c, setID: 0xf},
|
||||
113: {cat: 0x2d, setID: 0xa},
|
||||
114: {cat: 0x2d, setID: 0x17},
|
||||
115: {cat: 0x2d, setID: 0x18},
|
||||
116: {cat: 0x2f, setID: 0x6},
|
||||
117: {cat: 0x3a, setID: 0xb},
|
||||
118: {cat: 0x2f, setID: 0x19},
|
||||
119: {cat: 0x3c, setID: 0xb},
|
||||
120: {cat: 0x55, setID: 0x3},
|
||||
121: {cat: 0x22, setID: 0x1},
|
||||
122: {cat: 0x24, setID: 0x3},
|
||||
123: {cat: 0x2c, setID: 0xc},
|
||||
124: {cat: 0x2d, setID: 0xb},
|
||||
125: {cat: 0xf, setID: 0x6},
|
||||
126: {cat: 0x1f, setID: 0x7},
|
||||
127: {cat: 0x62, setID: 0x3},
|
||||
128: {cat: 0xf, setID: 0xe},
|
||||
129: {cat: 0x1f, setID: 0xf},
|
||||
130: {cat: 0x64, setID: 0x3},
|
||||
131: {cat: 0xf, setID: 0xa},
|
||||
132: {cat: 0x65, setID: 0x3},
|
||||
133: {cat: 0xf, setID: 0x17},
|
||||
134: {cat: 0x65, setID: 0x3},
|
||||
135: {cat: 0xf, setID: 0x18},
|
||||
136: {cat: 0x65, setID: 0x3},
|
||||
137: {cat: 0x2f, setID: 0x6},
|
||||
138: {cat: 0x3a, setID: 0x1a},
|
||||
139: {cat: 0x2f, setID: 0x1b},
|
||||
140: {cat: 0x3b, setID: 0x1c},
|
||||
141: {cat: 0x2f, setID: 0x1d},
|
||||
142: {cat: 0x3c, setID: 0x1e},
|
||||
143: {cat: 0x37, setID: 0x3},
|
||||
144: {cat: 0xa5, setID: 0x0},
|
||||
145: {cat: 0x22, setID: 0x1},
|
||||
146: {cat: 0x23, setID: 0x2},
|
||||
147: {cat: 0x24, setID: 0x1f},
|
||||
148: {cat: 0x25, setID: 0x20},
|
||||
149: {cat: 0xf, setID: 0x6},
|
||||
150: {cat: 0x62, setID: 0x3},
|
||||
151: {cat: 0xf, setID: 0x1b},
|
||||
152: {cat: 0x63, setID: 0x3},
|
||||
153: {cat: 0xf, setID: 0x21},
|
||||
154: {cat: 0x64, setID: 0x3},
|
||||
155: {cat: 0x75, setID: 0x3},
|
||||
156: {cat: 0x21, setID: 0x3},
|
||||
157: {cat: 0x22, setID: 0x1},
|
||||
158: {cat: 0x23, setID: 0x2},
|
||||
159: {cat: 0x2c, setID: 0x22},
|
||||
160: {cat: 0x2d, setID: 0x5},
|
||||
161: {cat: 0x21, setID: 0x3},
|
||||
162: {cat: 0x22, setID: 0x1},
|
||||
163: {cat: 0x23, setID: 0x2},
|
||||
164: {cat: 0x24, setID: 0x23},
|
||||
165: {cat: 0x25, setID: 0x24},
|
||||
} // Size: 356 bytes
|
||||
|
||||
var cardinalIndex = []uint8{ // 36 elements
|
||||
0x00, 0x00, 0x02, 0x03, 0x04, 0x06, 0x09, 0x0a,
|
||||
0x0c, 0x0d, 0x10, 0x14, 0x17, 0x1d, 0x28, 0x2b,
|
||||
0x2d, 0x2f, 0x32, 0x38, 0x42, 0x45, 0x4c, 0x55,
|
||||
0x5c, 0x61, 0x6d, 0x74, 0x79, 0x7d, 0x89, 0x91,
|
||||
0x95, 0x9c, 0xa1, 0xa6,
|
||||
} // Size: 60 bytes
|
||||
|
||||
var cardinalLangToIndex = []uint8{ // 775 elements
|
||||
// Entry 0 - 3F
|
||||
0x00, 0x08, 0x08, 0x08, 0x00, 0x00, 0x06, 0x06,
|
||||
0x01, 0x01, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21,
|
||||
0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21,
|
||||
0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21,
|
||||
0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21,
|
||||
0x01, 0x01, 0x08, 0x08, 0x04, 0x04, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x00, 0x00, 0x1a, 0x1a, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x06, 0x00, 0x00,
|
||||
// Entry 40 - 7F
|
||||
0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x1e, 0x1e,
|
||||
0x08, 0x08, 0x13, 0x13, 0x13, 0x13, 0x13, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x00, 0x00, 0x00, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x18, 0x18, 0x00, 0x00, 0x22, 0x22, 0x09, 0x09,
|
||||
0x09, 0x00, 0x00, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x00, 0x00, 0x16, 0x16, 0x00,
|
||||
0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 80 - BF
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
// Entry C0 - FF
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
// Entry 100 - 13F
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x04, 0x04,
|
||||
0x08, 0x08, 0x00, 0x00, 0x01, 0x01, 0x01, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x04, 0x04, 0x0c, 0x0c,
|
||||
0x08, 0x08, 0x08, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
// Entry 140 - 17F
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x08, 0x08, 0x04, 0x04, 0x1f, 0x1f,
|
||||
0x14, 0x14, 0x04, 0x04, 0x08, 0x08, 0x08, 0x08,
|
||||
0x01, 0x01, 0x06, 0x00, 0x00, 0x20, 0x20, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x17, 0x17, 0x01,
|
||||
0x01, 0x13, 0x13, 0x13, 0x16, 0x16, 0x08, 0x08,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 180 - 1BF
|
||||
0x00, 0x04, 0x0a, 0x0a, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x10, 0x17, 0x00, 0x00, 0x00, 0x08, 0x08,
|
||||
0x04, 0x08, 0x08, 0x00, 0x00, 0x08, 0x08, 0x02,
|
||||
0x02, 0x08, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x00, 0x00, 0x00, 0x00, 0x01,
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x08,
|
||||
0x08, 0x08, 0x00, 0x00, 0x0f, 0x0f, 0x08, 0x10,
|
||||
// Entry 1C0 - 1FF
|
||||
0x10, 0x08, 0x08, 0x0e, 0x0e, 0x08, 0x08, 0x08,
|
||||
0x08, 0x00, 0x00, 0x06, 0x06, 0x06, 0x06, 0x06,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x1b, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x0d, 0x08,
|
||||
0x08, 0x08, 0x00, 0x00, 0x00, 0x00, 0x06, 0x06,
|
||||
0x00, 0x00, 0x08, 0x08, 0x0b, 0x0b, 0x08, 0x08,
|
||||
0x08, 0x08, 0x12, 0x01, 0x01, 0x00, 0x00, 0x00,
|
||||
0x00, 0x1c, 0x1c, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 200 - 23F
|
||||
0x00, 0x08, 0x10, 0x10, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x00, 0x00, 0x00, 0x08, 0x08, 0x08, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x00,
|
||||
0x00, 0x08, 0x08, 0x08, 0x08, 0x08, 0x00, 0x08,
|
||||
0x06, 0x00, 0x00, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x06, 0x06,
|
||||
0x06, 0x06, 0x06, 0x08, 0x19, 0x19, 0x0d, 0x0d,
|
||||
0x08, 0x08, 0x03, 0x04, 0x03, 0x04, 0x04, 0x04,
|
||||
// Entry 240 - 27F
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x00,
|
||||
0x00, 0x00, 0x00, 0x08, 0x08, 0x00, 0x00, 0x12,
|
||||
0x12, 0x12, 0x08, 0x08, 0x1d, 0x1d, 0x1d, 0x1d,
|
||||
0x1d, 0x1d, 0x1d, 0x00, 0x00, 0x08, 0x08, 0x00,
|
||||
0x00, 0x08, 0x08, 0x00, 0x00, 0x08, 0x08, 0x08,
|
||||
0x10, 0x10, 0x10, 0x10, 0x08, 0x08, 0x00, 0x00,
|
||||
0x00, 0x00, 0x13, 0x11, 0x11, 0x11, 0x11, 0x11,
|
||||
0x05, 0x05, 0x18, 0x18, 0x15, 0x15, 0x10, 0x10,
|
||||
// Entry 280 - 2BF
|
||||
0x10, 0x10, 0x10, 0x10, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x13,
|
||||
0x13, 0x13, 0x13, 0x13, 0x13, 0x13, 0x13, 0x13,
|
||||
0x13, 0x13, 0x08, 0x08, 0x08, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x00, 0x00, 0x00, 0x00, 0x06, 0x06, 0x06,
|
||||
0x08, 0x08, 0x08, 0x0c, 0x08, 0x00, 0x00, 0x08,
|
||||
// Entry 2C0 - 2FF
|
||||
0x08, 0x08, 0x08, 0x00, 0x00, 0x00, 0x00, 0x07,
|
||||
0x07, 0x08, 0x08, 0x1d, 0x1d, 0x04, 0x04, 0x04,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x08,
|
||||
0x08, 0x08, 0x08, 0x06, 0x08, 0x08, 0x00, 0x00,
|
||||
0x08, 0x08, 0x08, 0x00, 0x00, 0x04, 0x04, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 300 - 33F
|
||||
0x00, 0x00, 0x00, 0x01, 0x01, 0x04, 0x04,
|
||||
} // Size: 799 bytes
|
||||
|
||||
var cardinalInclusionMasks = []uint64{ // 100 elements
|
||||
// Entry 0 - 1F
|
||||
0x0000000200500419, 0x0000000000512153, 0x000000000a327105, 0x0000000ca23c7101,
|
||||
0x00000004a23c7201, 0x0000000482943001, 0x0000001482943201, 0x0000000502943001,
|
||||
0x0000000502943001, 0x0000000522943201, 0x0000000540543401, 0x00000000454128e1,
|
||||
0x000000005b02e821, 0x000000006304e821, 0x000000006304ea21, 0x0000000042842821,
|
||||
0x0000000042842a21, 0x0000000042842821, 0x0000000042842821, 0x0000000062842a21,
|
||||
0x0000000200400421, 0x0000000000400061, 0x000000000a004021, 0x0000000022004021,
|
||||
0x0000000022004221, 0x0000000002800021, 0x0000000002800221, 0x0000000002800021,
|
||||
0x0000000002800021, 0x0000000022800221, 0x0000000000400421, 0x0000000000400061,
|
||||
// Entry 20 - 3F
|
||||
0x000000000a004021, 0x0000000022004021, 0x0000000022004221, 0x0000000002800021,
|
||||
0x0000000002800221, 0x0000000002800021, 0x0000000002800021, 0x0000000022800221,
|
||||
0x0000000200400421, 0x0000000000400061, 0x000000000a004021, 0x0000000022004021,
|
||||
0x0000000022004221, 0x0000000002800021, 0x0000000002800221, 0x0000000002800021,
|
||||
0x0000000002800021, 0x0000000022800221, 0x0000000000400421, 0x0000000000400061,
|
||||
0x000000000a004021, 0x0000000022004021, 0x0000000022004221, 0x0000000002800021,
|
||||
0x0000000002800221, 0x0000000002800021, 0x0000000002800021, 0x0000000022800221,
|
||||
0x0000000200400421, 0x0000000000400061, 0x000000000a004021, 0x0000000022004021,
|
||||
// Entry 40 - 5F
|
||||
0x0000000022004221, 0x0000000002800021, 0x0000000002800221, 0x0000000002800021,
|
||||
0x0000000002800021, 0x0000000022800221, 0x0000000040400421, 0x0000000044400061,
|
||||
0x000000005a004021, 0x0000000062004021, 0x0000000062004221, 0x0000000042800021,
|
||||
0x0000000042800221, 0x0000000042800021, 0x0000000042800021, 0x0000000062800221,
|
||||
0x0000000200400421, 0x0000000000400061, 0x000000000a004021, 0x0000000022004021,
|
||||
0x0000000022004221, 0x0000000002800021, 0x0000000002800221, 0x0000000002800021,
|
||||
0x0000000002800021, 0x0000000022800221, 0x0000000040400421, 0x0000000044400061,
|
||||
0x000000005a004021, 0x0000000062004021, 0x0000000062004221, 0x0000000042800021,
|
||||
// Entry 60 - 7F
|
||||
0x0000000042800221, 0x0000000042800021, 0x0000000042800021, 0x0000000062800221,
|
||||
} // Size: 824 bytes
|
||||
|
||||
// Slots used for cardinal: A6 of 0xFF rules; 24 of 0xFF indexes; 37 of 64 sets
|
||||
|
||||
// Total table size 3860 bytes (3KiB); checksum: AAFBF21
|
||||
417
vendor/golang.org/x/text/internal/catmsg/catmsg.go
generated
vendored
Normal file
417
vendor/golang.org/x/text/internal/catmsg/catmsg.go
generated
vendored
Normal file
|
|
@ -0,0 +1,417 @@
|
|||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package catmsg contains support types for package x/text/message/catalog.
|
||||
//
|
||||
// This package contains the low-level implementations of Message used by the
|
||||
// catalog package and provides primitives for other packages to implement their
|
||||
// own. For instance, the plural package provides functionality for selecting
|
||||
// translation strings based on the plural category of substitution arguments.
|
||||
//
|
||||
// # Encoding and Decoding
|
||||
//
|
||||
// Catalogs store Messages encoded as a single string. Compiling a message into
|
||||
// a string both results in compacter representation and speeds up evaluation.
|
||||
//
|
||||
// A Message must implement a Compile method to convert its arbitrary
|
||||
// representation to a string. The Compile method takes an Encoder which
|
||||
// facilitates serializing the message. Encoders also provide more context of
|
||||
// the messages's creation (such as for which language the message is intended),
|
||||
// which may not be known at the time of the creation of the message.
|
||||
//
|
||||
// Each message type must also have an accompanying decoder registered to decode
|
||||
// the message. This decoder takes a Decoder argument which provides the
|
||||
// counterparts for the decoding.
|
||||
//
|
||||
// # Renderers
|
||||
//
|
||||
// A Decoder must be initialized with a Renderer implementation. These
|
||||
// implementations must be provided by packages that use Catalogs, typically
|
||||
// formatting packages such as x/text/message. A typical user will not need to
|
||||
// worry about this type; it is only relevant to packages that do string
|
||||
// formatting and want to use the catalog package to handle localized strings.
|
||||
//
|
||||
// A package that uses catalogs for selecting strings receives selection results
|
||||
// as sequence of substrings passed to the Renderer. The following snippet shows
|
||||
// how to express the above example using the message package.
|
||||
//
|
||||
// message.Set(language.English, "You are %d minute(s) late.",
|
||||
// catalog.Var("minutes", plural.Select(1, "one", "minute")),
|
||||
// catalog.String("You are %[1]d ${minutes} late."))
|
||||
//
|
||||
// p := message.NewPrinter(language.English)
|
||||
// p.Printf("You are %d minute(s) late.", 5) // always 5 minutes late.
|
||||
//
|
||||
// To evaluate the Printf, package message wraps the arguments in a Renderer
|
||||
// that is passed to the catalog for message decoding. The call sequence that
|
||||
// results from evaluating the above message, assuming the person is rather
|
||||
// tardy, is:
|
||||
//
|
||||
// Render("You are %[1]d ")
|
||||
// Arg(1)
|
||||
// Render("minutes")
|
||||
// Render(" late.")
|
||||
//
|
||||
// The calls to Arg is caused by the plural.Select execution, which evaluates
|
||||
// the argument to determine whether the singular or plural message form should
|
||||
// be selected. The calls to Render reports the partial results to the message
|
||||
// package for further evaluation.
|
||||
package catmsg
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
||||
// A Handle refers to a registered message type.
|
||||
type Handle int
|
||||
|
||||
// A Handler decodes and evaluates data compiled by a Message and sends the
|
||||
// result to the Decoder. The output may depend on the value of the substitution
|
||||
// arguments, accessible by the Decoder's Arg method. The Handler returns false
|
||||
// if there is no translation for the given substitution arguments.
|
||||
type Handler func(d *Decoder) bool
|
||||
|
||||
// Register records the existence of a message type and returns a Handle that
|
||||
// can be used in the Encoder's EncodeMessageType method to create such
|
||||
// messages. The prefix of the name should be the package path followed by
|
||||
// an optional disambiguating string.
|
||||
// Register will panic if a handle for the same name was already registered.
|
||||
func Register(name string, handler Handler) Handle {
|
||||
mutex.Lock()
|
||||
defer mutex.Unlock()
|
||||
|
||||
if _, ok := names[name]; ok {
|
||||
panic(fmt.Errorf("catmsg: handler for %q already exists", name))
|
||||
}
|
||||
h := Handle(len(handlers))
|
||||
names[name] = h
|
||||
handlers = append(handlers, handler)
|
||||
return h
|
||||
}
|
||||
|
||||
// These handlers require fixed positions in the handlers slice.
|
||||
const (
|
||||
msgVars Handle = iota
|
||||
msgFirst
|
||||
msgRaw
|
||||
msgString
|
||||
msgAffix
|
||||
// Leave some arbitrary room for future expansion: 20 should suffice.
|
||||
numInternal = 20
|
||||
)
|
||||
|
||||
const prefix = "golang.org/x/text/internal/catmsg."
|
||||
|
||||
var (
|
||||
// TODO: find a more stable way to link handles to message types.
|
||||
mutex sync.Mutex
|
||||
names = map[string]Handle{
|
||||
prefix + "Vars": msgVars,
|
||||
prefix + "First": msgFirst,
|
||||
prefix + "Raw": msgRaw,
|
||||
prefix + "String": msgString,
|
||||
prefix + "Affix": msgAffix,
|
||||
}
|
||||
handlers = make([]Handler, numInternal)
|
||||
)
|
||||
|
||||
func init() {
|
||||
// This handler is a message type wrapper that initializes a decoder
|
||||
// with a variable block. This message type, if present, is always at the
|
||||
// start of an encoded message.
|
||||
handlers[msgVars] = func(d *Decoder) bool {
|
||||
blockSize := int(d.DecodeUint())
|
||||
d.vars = d.data[:blockSize]
|
||||
d.data = d.data[blockSize:]
|
||||
return d.executeMessage()
|
||||
}
|
||||
|
||||
// First takes the first message in a sequence that results in a match for
|
||||
// the given substitution arguments.
|
||||
handlers[msgFirst] = func(d *Decoder) bool {
|
||||
for !d.Done() {
|
||||
if d.ExecuteMessage() {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
handlers[msgRaw] = func(d *Decoder) bool {
|
||||
d.Render(d.data)
|
||||
return true
|
||||
}
|
||||
|
||||
// A String message alternates between a string constant and a variable
|
||||
// substitution.
|
||||
handlers[msgString] = func(d *Decoder) bool {
|
||||
for !d.Done() {
|
||||
if str := d.DecodeString(); str != "" {
|
||||
d.Render(str)
|
||||
}
|
||||
if d.Done() {
|
||||
break
|
||||
}
|
||||
d.ExecuteSubstitution()
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
handlers[msgAffix] = func(d *Decoder) bool {
|
||||
// TODO: use an alternative method for common cases.
|
||||
prefix := d.DecodeString()
|
||||
suffix := d.DecodeString()
|
||||
if prefix != "" {
|
||||
d.Render(prefix)
|
||||
}
|
||||
ret := d.ExecuteMessage()
|
||||
if suffix != "" {
|
||||
d.Render(suffix)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
// ErrIncomplete indicates a compiled message does not define translations
|
||||
// for all possible argument values. If this message is returned, evaluating
|
||||
// a message may result in the ErrNoMatch error.
|
||||
ErrIncomplete = errors.New("catmsg: incomplete message; may not give result for all inputs")
|
||||
|
||||
// ErrNoMatch indicates no translation message matched the given input
|
||||
// parameters when evaluating a message.
|
||||
ErrNoMatch = errors.New("catmsg: no translation for inputs")
|
||||
)
|
||||
|
||||
// A Message holds a collection of translations for the same phrase that may
|
||||
// vary based on the values of substitution arguments.
|
||||
type Message interface {
|
||||
// Compile encodes the format string(s) of the message as a string for later
|
||||
// evaluation.
|
||||
//
|
||||
// The first call Compile makes on the encoder must be EncodeMessageType.
|
||||
// The handle passed to this call may either be a handle returned by
|
||||
// Register to encode a single custom message, or HandleFirst followed by
|
||||
// a sequence of calls to EncodeMessage.
|
||||
//
|
||||
// Compile must return ErrIncomplete if it is possible for evaluation to
|
||||
// not match any translation for a given set of formatting parameters.
|
||||
// For example, selecting a translation based on plural form may not yield
|
||||
// a match if the form "Other" is not one of the selectors.
|
||||
//
|
||||
// Compile may return any other application-specific error. For backwards
|
||||
// compatibility with package like fmt, which often do not do sanity
|
||||
// checking of format strings ahead of time, Compile should still make an
|
||||
// effort to have some sensible fallback in case of an error.
|
||||
Compile(e *Encoder) error
|
||||
}
|
||||
|
||||
// Compile converts a Message to a data string that can be stored in a Catalog.
|
||||
// The resulting string can subsequently be decoded by passing to the Execute
|
||||
// method of a Decoder.
|
||||
func Compile(tag language.Tag, macros Dictionary, m Message) (data string, err error) {
|
||||
// TODO: pass macros so they can be used for validation.
|
||||
v := &Encoder{inBody: true} // encoder for variables
|
||||
v.root = v
|
||||
e := &Encoder{root: v, parent: v, tag: tag} // encoder for messages
|
||||
err = m.Compile(e)
|
||||
// This package serves te message package, which in turn is meant to be a
|
||||
// drop-in replacement for fmt. With the fmt package, format strings are
|
||||
// evaluated lazily and errors are handled by substituting strings in the
|
||||
// result, rather then returning an error. Dealing with multiple languages
|
||||
// makes it more important to check errors ahead of time. We chose to be
|
||||
// consistent and compatible and allow graceful degradation in case of
|
||||
// errors.
|
||||
buf := e.buf[stripPrefix(e.buf):]
|
||||
if len(v.buf) > 0 {
|
||||
// Prepend variable block.
|
||||
b := make([]byte, 1+maxVarintBytes+len(v.buf)+len(buf))
|
||||
b[0] = byte(msgVars)
|
||||
b = b[:1+encodeUint(b[1:], uint64(len(v.buf)))]
|
||||
b = append(b, v.buf...)
|
||||
b = append(b, buf...)
|
||||
buf = b
|
||||
}
|
||||
if err == nil {
|
||||
err = v.err
|
||||
}
|
||||
return string(buf), err
|
||||
}
|
||||
|
||||
// FirstOf is a message type that prints the first message in the sequence that
|
||||
// resolves to a match for the given substitution arguments.
|
||||
type FirstOf []Message
|
||||
|
||||
// Compile implements Message.
|
||||
func (s FirstOf) Compile(e *Encoder) error {
|
||||
e.EncodeMessageType(msgFirst)
|
||||
err := ErrIncomplete
|
||||
for i, m := range s {
|
||||
if err == nil {
|
||||
return fmt.Errorf("catalog: message argument %d is complete and blocks subsequent messages", i-1)
|
||||
}
|
||||
err = e.EncodeMessage(m)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Var defines a message that can be substituted for a placeholder of the same
|
||||
// name. If an expression does not result in a string after evaluation, Name is
|
||||
// used as the substitution. For example:
|
||||
//
|
||||
// Var{
|
||||
// Name: "minutes",
|
||||
// Message: plural.Select(1, "one", "minute"),
|
||||
// }
|
||||
//
|
||||
// will resolve to minute for singular and minutes for plural forms.
|
||||
type Var struct {
|
||||
Name string
|
||||
Message Message
|
||||
}
|
||||
|
||||
var errIsVar = errors.New("catmsg: variable used as message")
|
||||
|
||||
// Compile implements Message.
|
||||
//
|
||||
// Note that this method merely registers a variable; it does not create an
|
||||
// encoded message.
|
||||
func (v *Var) Compile(e *Encoder) error {
|
||||
if err := e.addVar(v.Name, v.Message); err != nil {
|
||||
return err
|
||||
}
|
||||
// Using a Var by itself is an error. If it is in a sequence followed by
|
||||
// other messages referring to it, this error will be ignored.
|
||||
return errIsVar
|
||||
}
|
||||
|
||||
// Raw is a message consisting of a single format string that is passed as is
|
||||
// to the Renderer.
|
||||
//
|
||||
// Note that a Renderer may still do its own variable substitution.
|
||||
type Raw string
|
||||
|
||||
// Compile implements Message.
|
||||
func (r Raw) Compile(e *Encoder) (err error) {
|
||||
e.EncodeMessageType(msgRaw)
|
||||
// Special case: raw strings don't have a size encoding and so don't use
|
||||
// EncodeString.
|
||||
e.buf = append(e.buf, r...)
|
||||
return nil
|
||||
}
|
||||
|
||||
// String is a message consisting of a single format string which contains
|
||||
// placeholders that may be substituted with variables.
|
||||
//
|
||||
// Variable substitutions are marked with placeholders and a variable name of
|
||||
// the form ${name}. Any other substitutions such as Go templates or
|
||||
// printf-style substitutions are left to be done by the Renderer.
|
||||
//
|
||||
// When evaluation a string interpolation, a Renderer will receive separate
|
||||
// calls for each placeholder and interstitial string. For example, for the
|
||||
// message: "%[1]v ${invites} %[2]v to ${their} party." The sequence of calls
|
||||
// is:
|
||||
//
|
||||
// d.Render("%[1]v ")
|
||||
// d.Arg(1)
|
||||
// d.Render(resultOfInvites)
|
||||
// d.Render(" %[2]v to ")
|
||||
// d.Arg(2)
|
||||
// d.Render(resultOfTheir)
|
||||
// d.Render(" party.")
|
||||
//
|
||||
// where the messages for "invites" and "their" both use a plural.Select
|
||||
// referring to the first argument.
|
||||
//
|
||||
// Strings may also invoke macros. Macros are essentially variables that can be
|
||||
// reused. Macros may, for instance, be used to make selections between
|
||||
// different conjugations of a verb. See the catalog package description for an
|
||||
// overview of macros.
|
||||
type String string
|
||||
|
||||
// Compile implements Message. It parses the placeholder formats and returns
|
||||
// any error.
|
||||
func (s String) Compile(e *Encoder) (err error) {
|
||||
msg := string(s)
|
||||
const subStart = "${"
|
||||
hasHeader := false
|
||||
p := 0
|
||||
b := []byte{}
|
||||
for {
|
||||
i := strings.Index(msg[p:], subStart)
|
||||
if i == -1 {
|
||||
break
|
||||
}
|
||||
b = append(b, msg[p:p+i]...)
|
||||
p += i + len(subStart)
|
||||
if i = strings.IndexByte(msg[p:], '}'); i == -1 {
|
||||
b = append(b, "$!(MISSINGBRACE)"...)
|
||||
err = fmt.Errorf("catmsg: missing '}'")
|
||||
p = len(msg)
|
||||
break
|
||||
}
|
||||
name := strings.TrimSpace(msg[p : p+i])
|
||||
if q := strings.IndexByte(name, '('); q == -1 {
|
||||
if !hasHeader {
|
||||
hasHeader = true
|
||||
e.EncodeMessageType(msgString)
|
||||
}
|
||||
e.EncodeString(string(b))
|
||||
e.EncodeSubstitution(name)
|
||||
b = b[:0]
|
||||
} else if j := strings.IndexByte(name[q:], ')'); j == -1 {
|
||||
// TODO: what should the error be?
|
||||
b = append(b, "$!(MISSINGPAREN)"...)
|
||||
err = fmt.Errorf("catmsg: missing ')'")
|
||||
} else if x, sErr := strconv.ParseUint(strings.TrimSpace(name[q+1:q+j]), 10, 32); sErr != nil {
|
||||
// TODO: handle more than one argument
|
||||
b = append(b, "$!(BADNUM)"...)
|
||||
err = fmt.Errorf("catmsg: invalid number %q", strings.TrimSpace(name[q+1:q+j]))
|
||||
} else {
|
||||
if !hasHeader {
|
||||
hasHeader = true
|
||||
e.EncodeMessageType(msgString)
|
||||
}
|
||||
e.EncodeString(string(b))
|
||||
e.EncodeSubstitution(name[:q], int(x))
|
||||
b = b[:0]
|
||||
}
|
||||
p += i + 1
|
||||
}
|
||||
b = append(b, msg[p:]...)
|
||||
if !hasHeader {
|
||||
// Simplify string to a raw string.
|
||||
Raw(string(b)).Compile(e)
|
||||
} else if len(b) > 0 {
|
||||
e.EncodeString(string(b))
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// Affix is a message that adds a prefix and suffix to another message.
|
||||
// This is mostly used add back whitespace to a translation that was stripped
|
||||
// before sending it out.
|
||||
type Affix struct {
|
||||
Message Message
|
||||
Prefix string
|
||||
Suffix string
|
||||
}
|
||||
|
||||
// Compile implements Message.
|
||||
func (a Affix) Compile(e *Encoder) (err error) {
|
||||
// TODO: consider adding a special message type that just adds a single
|
||||
// return. This is probably common enough to handle the majority of cases.
|
||||
// Get some stats first, though.
|
||||
e.EncodeMessageType(msgAffix)
|
||||
e.EncodeString(a.Prefix)
|
||||
e.EncodeString(a.Suffix)
|
||||
e.EncodeMessage(a.Message)
|
||||
return nil
|
||||
}
|
||||
407
vendor/golang.org/x/text/internal/catmsg/codec.go
generated
vendored
Normal file
407
vendor/golang.org/x/text/internal/catmsg/codec.go
generated
vendored
Normal file
|
|
@ -0,0 +1,407 @@
|
|||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package catmsg
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
||||
// A Renderer renders a Message.
|
||||
type Renderer interface {
|
||||
// Render renders the given string. The given string may be interpreted as a
|
||||
// format string, such as the one used by the fmt package or a template.
|
||||
Render(s string)
|
||||
|
||||
// Arg returns the i-th argument passed to format a message. This method
|
||||
// should return nil if there is no such argument. Messages need access to
|
||||
// arguments to allow selecting a message based on linguistic features of
|
||||
// those arguments.
|
||||
Arg(i int) interface{}
|
||||
}
|
||||
|
||||
// A Dictionary specifies a source of messages, including variables or macros.
|
||||
type Dictionary interface {
|
||||
// Lookup returns the message for the given key. It returns false for ok if
|
||||
// such a message could not be found.
|
||||
Lookup(key string) (data string, ok bool)
|
||||
|
||||
// TODO: consider returning an interface, instead of a string. This will
|
||||
// allow implementations to do their own message type decoding.
|
||||
}
|
||||
|
||||
// An Encoder serializes a Message to a string.
|
||||
type Encoder struct {
|
||||
// The root encoder is used for storing encoded variables.
|
||||
root *Encoder
|
||||
// The parent encoder provides the surrounding scopes for resolving variable
|
||||
// names.
|
||||
parent *Encoder
|
||||
|
||||
tag language.Tag
|
||||
|
||||
// buf holds the encoded message so far. After a message completes encoding,
|
||||
// the contents of buf, prefixed by the encoded length, are flushed to the
|
||||
// parent buffer.
|
||||
buf []byte
|
||||
|
||||
// vars is the lookup table of variables in the current scope.
|
||||
vars []keyVal
|
||||
|
||||
err error
|
||||
inBody bool // if false next call must be EncodeMessageType
|
||||
}
|
||||
|
||||
type keyVal struct {
|
||||
key string
|
||||
offset int
|
||||
}
|
||||
|
||||
// Language reports the language for which the encoded message will be stored
|
||||
// in the Catalog.
|
||||
func (e *Encoder) Language() language.Tag { return e.tag }
|
||||
|
||||
func (e *Encoder) setError(err error) {
|
||||
if e.root.err == nil {
|
||||
e.root.err = err
|
||||
}
|
||||
}
|
||||
|
||||
// EncodeUint encodes x.
|
||||
func (e *Encoder) EncodeUint(x uint64) {
|
||||
e.checkInBody()
|
||||
var buf [maxVarintBytes]byte
|
||||
n := encodeUint(buf[:], x)
|
||||
e.buf = append(e.buf, buf[:n]...)
|
||||
}
|
||||
|
||||
// EncodeString encodes s.
|
||||
func (e *Encoder) EncodeString(s string) {
|
||||
e.checkInBody()
|
||||
e.EncodeUint(uint64(len(s)))
|
||||
e.buf = append(e.buf, s...)
|
||||
}
|
||||
|
||||
// EncodeMessageType marks the current message to be of type h.
|
||||
//
|
||||
// It must be the first call of a Message's Compile method.
|
||||
func (e *Encoder) EncodeMessageType(h Handle) {
|
||||
if e.inBody {
|
||||
panic("catmsg: EncodeMessageType not the first method called")
|
||||
}
|
||||
e.inBody = true
|
||||
e.EncodeUint(uint64(h))
|
||||
}
|
||||
|
||||
// EncodeMessage serializes the given message inline at the current position.
|
||||
func (e *Encoder) EncodeMessage(m Message) error {
|
||||
e = &Encoder{root: e.root, parent: e, tag: e.tag}
|
||||
err := m.Compile(e)
|
||||
if _, ok := m.(*Var); !ok {
|
||||
e.flushTo(e.parent)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (e *Encoder) checkInBody() {
|
||||
if !e.inBody {
|
||||
panic("catmsg: expected prior call to EncodeMessageType")
|
||||
}
|
||||
}
|
||||
|
||||
// stripPrefix indicates the number of prefix bytes that must be stripped to
|
||||
// turn a single-element sequence into a message that is just this single member
|
||||
// without its size prefix. If the message can be stripped, b[1:n] contains the
|
||||
// size prefix.
|
||||
func stripPrefix(b []byte) (n int) {
|
||||
if len(b) > 0 && Handle(b[0]) == msgFirst {
|
||||
x, n, _ := decodeUint(b[1:])
|
||||
if 1+n+int(x) == len(b) {
|
||||
return 1 + n
|
||||
}
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (e *Encoder) flushTo(dst *Encoder) {
|
||||
data := e.buf
|
||||
p := stripPrefix(data)
|
||||
if p > 0 {
|
||||
data = data[1:]
|
||||
} else {
|
||||
// Prefix the size.
|
||||
dst.EncodeUint(uint64(len(data)))
|
||||
}
|
||||
dst.buf = append(dst.buf, data...)
|
||||
}
|
||||
|
||||
func (e *Encoder) addVar(key string, m Message) error {
|
||||
for _, v := range e.parent.vars {
|
||||
if v.key == key {
|
||||
err := fmt.Errorf("catmsg: duplicate variable %q", key)
|
||||
e.setError(err)
|
||||
return err
|
||||
}
|
||||
}
|
||||
scope := e.parent
|
||||
// If a variable message is Incomplete, and does not evaluate to a message
|
||||
// during execution, we fall back to the variable name. We encode this by
|
||||
// appending the variable name if the message reports it's incomplete.
|
||||
|
||||
err := m.Compile(e)
|
||||
if err != ErrIncomplete {
|
||||
e.setError(err)
|
||||
}
|
||||
switch {
|
||||
case len(e.buf) == 1 && Handle(e.buf[0]) == msgFirst: // empty sequence
|
||||
e.buf = e.buf[:0]
|
||||
e.inBody = false
|
||||
fallthrough
|
||||
case len(e.buf) == 0:
|
||||
// Empty message.
|
||||
if err := String(key).Compile(e); err != nil {
|
||||
e.setError(err)
|
||||
}
|
||||
case err == ErrIncomplete:
|
||||
if Handle(e.buf[0]) != msgFirst {
|
||||
seq := &Encoder{root: e.root, parent: e}
|
||||
seq.EncodeMessageType(msgFirst)
|
||||
e.flushTo(seq)
|
||||
e = seq
|
||||
}
|
||||
// e contains a sequence; append the fallback string.
|
||||
e.EncodeMessage(String(key))
|
||||
}
|
||||
|
||||
// Flush result to variable heap.
|
||||
offset := len(e.root.buf)
|
||||
e.flushTo(e.root)
|
||||
e.buf = e.buf[:0]
|
||||
|
||||
// Record variable offset in current scope.
|
||||
scope.vars = append(scope.vars, keyVal{key: key, offset: offset})
|
||||
return err
|
||||
}
|
||||
|
||||
const (
|
||||
substituteVar = iota
|
||||
substituteMacro
|
||||
substituteError
|
||||
)
|
||||
|
||||
// EncodeSubstitution inserts a resolved reference to a variable or macro.
|
||||
//
|
||||
// This call must be matched with a call to ExecuteSubstitution at decoding
|
||||
// time.
|
||||
func (e *Encoder) EncodeSubstitution(name string, arguments ...int) {
|
||||
if arity := len(arguments); arity > 0 {
|
||||
// TODO: also resolve macros.
|
||||
e.EncodeUint(substituteMacro)
|
||||
e.EncodeString(name)
|
||||
for _, a := range arguments {
|
||||
e.EncodeUint(uint64(a))
|
||||
}
|
||||
return
|
||||
}
|
||||
for scope := e; scope != nil; scope = scope.parent {
|
||||
for _, v := range scope.vars {
|
||||
if v.key != name {
|
||||
continue
|
||||
}
|
||||
e.EncodeUint(substituteVar) // TODO: support arity > 0
|
||||
e.EncodeUint(uint64(v.offset))
|
||||
return
|
||||
}
|
||||
}
|
||||
// TODO: refer to dictionary-wide scoped variables.
|
||||
e.EncodeUint(substituteError)
|
||||
e.EncodeString(name)
|
||||
e.setError(fmt.Errorf("catmsg: unknown var %q", name))
|
||||
}
|
||||
|
||||
// A Decoder deserializes and evaluates messages that are encoded by an encoder.
|
||||
type Decoder struct {
|
||||
tag language.Tag
|
||||
dst Renderer
|
||||
macros Dictionary
|
||||
|
||||
err error
|
||||
vars string
|
||||
data string
|
||||
|
||||
macroArg int // TODO: allow more than one argument
|
||||
}
|
||||
|
||||
// NewDecoder returns a new Decoder.
|
||||
//
|
||||
// Decoders are designed to be reused for multiple invocations of Execute.
|
||||
// Only one goroutine may call Execute concurrently.
|
||||
func NewDecoder(tag language.Tag, r Renderer, macros Dictionary) *Decoder {
|
||||
return &Decoder{
|
||||
tag: tag,
|
||||
dst: r,
|
||||
macros: macros,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Decoder) setError(err error) {
|
||||
if d.err == nil {
|
||||
d.err = err
|
||||
}
|
||||
}
|
||||
|
||||
// Language returns the language in which the message is being rendered.
|
||||
//
|
||||
// The destination language may be a child language of the language used for
|
||||
// encoding. For instance, a decoding language of "pt-PT" is consistent with an
|
||||
// encoding language of "pt".
|
||||
func (d *Decoder) Language() language.Tag { return d.tag }
|
||||
|
||||
// Done reports whether there are more bytes to process in this message.
|
||||
func (d *Decoder) Done() bool { return len(d.data) == 0 }
|
||||
|
||||
// Render implements Renderer.
|
||||
func (d *Decoder) Render(s string) { d.dst.Render(s) }
|
||||
|
||||
// Arg implements Renderer.
|
||||
//
|
||||
// During evaluation of macros, the argument positions may be mapped to
|
||||
// arguments that differ from the original call.
|
||||
func (d *Decoder) Arg(i int) interface{} {
|
||||
if d.macroArg != 0 {
|
||||
if i != 1 {
|
||||
panic("catmsg: only macros with single argument supported")
|
||||
}
|
||||
i = d.macroArg
|
||||
}
|
||||
return d.dst.Arg(i)
|
||||
}
|
||||
|
||||
// DecodeUint decodes a number that was encoded with EncodeUint and advances the
|
||||
// position.
|
||||
func (d *Decoder) DecodeUint() uint64 {
|
||||
x, n, err := decodeUintString(d.data)
|
||||
d.data = d.data[n:]
|
||||
if err != nil {
|
||||
d.setError(err)
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
// DecodeString decodes a string that was encoded with EncodeString and advances
|
||||
// the position.
|
||||
func (d *Decoder) DecodeString() string {
|
||||
size := d.DecodeUint()
|
||||
s := d.data[:size]
|
||||
d.data = d.data[size:]
|
||||
return s
|
||||
}
|
||||
|
||||
// SkipMessage skips the message at the current location and advances the
|
||||
// position.
|
||||
func (d *Decoder) SkipMessage() {
|
||||
n := int(d.DecodeUint())
|
||||
d.data = d.data[n:]
|
||||
}
|
||||
|
||||
// Execute decodes and evaluates msg.
|
||||
//
|
||||
// Only one goroutine may call execute.
|
||||
func (d *Decoder) Execute(msg string) error {
|
||||
d.err = nil
|
||||
if !d.execute(msg) {
|
||||
return ErrNoMatch
|
||||
}
|
||||
return d.err
|
||||
}
|
||||
|
||||
func (d *Decoder) execute(msg string) bool {
|
||||
saved := d.data
|
||||
d.data = msg
|
||||
ok := d.executeMessage()
|
||||
d.data = saved
|
||||
return ok
|
||||
}
|
||||
|
||||
// executeMessageFromData is like execute, but also decodes a leading message
|
||||
// size and clips the given string accordingly.
|
||||
//
|
||||
// It reports the number of bytes consumed and whether a message was selected.
|
||||
func (d *Decoder) executeMessageFromData(s string) (n int, ok bool) {
|
||||
saved := d.data
|
||||
d.data = s
|
||||
size := int(d.DecodeUint())
|
||||
n = len(s) - len(d.data)
|
||||
// Sanitize the setting. This allows skipping a size argument for
|
||||
// RawString and method Done.
|
||||
d.data = d.data[:size]
|
||||
ok = d.executeMessage()
|
||||
n += size - len(d.data)
|
||||
d.data = saved
|
||||
return n, ok
|
||||
}
|
||||
|
||||
var errUnknownHandler = errors.New("catmsg: string contains unsupported handler")
|
||||
|
||||
// executeMessage reads the handle id, initializes the decoder and executes the
|
||||
// message. It is assumed that all of d.data[d.p:] is the single message.
|
||||
func (d *Decoder) executeMessage() bool {
|
||||
if d.Done() {
|
||||
// We interpret no data as a valid empty message.
|
||||
return true
|
||||
}
|
||||
handle := d.DecodeUint()
|
||||
|
||||
var fn Handler
|
||||
mutex.Lock()
|
||||
if int(handle) < len(handlers) {
|
||||
fn = handlers[handle]
|
||||
}
|
||||
mutex.Unlock()
|
||||
if fn == nil {
|
||||
d.setError(errUnknownHandler)
|
||||
d.execute(fmt.Sprintf("\x02$!(UNKNOWNMSGHANDLER=%#x)", handle))
|
||||
return true
|
||||
}
|
||||
return fn(d)
|
||||
}
|
||||
|
||||
// ExecuteMessage decodes and executes the message at the current position.
|
||||
func (d *Decoder) ExecuteMessage() bool {
|
||||
n, ok := d.executeMessageFromData(d.data)
|
||||
d.data = d.data[n:]
|
||||
return ok
|
||||
}
|
||||
|
||||
// ExecuteSubstitution executes the message corresponding to the substitution
|
||||
// as encoded by EncodeSubstitution.
|
||||
func (d *Decoder) ExecuteSubstitution() {
|
||||
switch x := d.DecodeUint(); x {
|
||||
case substituteVar:
|
||||
offset := d.DecodeUint()
|
||||
d.executeMessageFromData(d.vars[offset:])
|
||||
case substituteMacro:
|
||||
name := d.DecodeString()
|
||||
data, ok := d.macros.Lookup(name)
|
||||
old := d.macroArg
|
||||
// TODO: support macros of arity other than 1.
|
||||
d.macroArg = int(d.DecodeUint())
|
||||
switch {
|
||||
case !ok:
|
||||
// TODO: detect this at creation time.
|
||||
d.setError(fmt.Errorf("catmsg: undefined macro %q", name))
|
||||
fallthrough
|
||||
case !d.execute(data):
|
||||
d.dst.Render(name) // fall back to macro name.
|
||||
}
|
||||
d.macroArg = old
|
||||
case substituteError:
|
||||
d.dst.Render(d.DecodeString())
|
||||
default:
|
||||
panic("catmsg: unreachable")
|
||||
}
|
||||
}
|
||||
62
vendor/golang.org/x/text/internal/catmsg/varint.go
generated
vendored
Normal file
62
vendor/golang.org/x/text/internal/catmsg/varint.go
generated
vendored
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package catmsg
|
||||
|
||||
// This file implements varint encoding analogous to the one in encoding/binary.
|
||||
// We need a string version of this function, so we add that here and then add
|
||||
// the rest for consistency.
|
||||
|
||||
import "errors"
|
||||
|
||||
var (
|
||||
errIllegalVarint = errors.New("catmsg: illegal varint")
|
||||
errVarintTooLarge = errors.New("catmsg: varint too large for uint64")
|
||||
)
|
||||
|
||||
const maxVarintBytes = 10 // maximum length of a varint
|
||||
|
||||
// encodeUint encodes x as a variable-sized integer into buf and returns the
|
||||
// number of bytes written. buf must be at least maxVarintBytes long
|
||||
func encodeUint(buf []byte, x uint64) (n int) {
|
||||
for ; x > 127; n++ {
|
||||
buf[n] = 0x80 | uint8(x&0x7F)
|
||||
x >>= 7
|
||||
}
|
||||
buf[n] = uint8(x)
|
||||
n++
|
||||
return n
|
||||
}
|
||||
|
||||
func decodeUintString(s string) (x uint64, size int, err error) {
|
||||
i := 0
|
||||
for shift := uint(0); shift < 64; shift += 7 {
|
||||
if i >= len(s) {
|
||||
return 0, i, errIllegalVarint
|
||||
}
|
||||
b := uint64(s[i])
|
||||
i++
|
||||
x |= (b & 0x7F) << shift
|
||||
if b&0x80 == 0 {
|
||||
return x, i, nil
|
||||
}
|
||||
}
|
||||
return 0, i, errVarintTooLarge
|
||||
}
|
||||
|
||||
func decodeUint(b []byte) (x uint64, size int, err error) {
|
||||
i := 0
|
||||
for shift := uint(0); shift < 64; shift += 7 {
|
||||
if i >= len(b) {
|
||||
return 0, i, errIllegalVarint
|
||||
}
|
||||
c := uint64(b[i])
|
||||
i++
|
||||
x |= (c & 0x7F) << shift
|
||||
if c&0x80 == 0 {
|
||||
return x, i, nil
|
||||
}
|
||||
}
|
||||
return 0, i, errVarintTooLarge
|
||||
}
|
||||
41
vendor/golang.org/x/text/internal/format/format.go
generated
vendored
Normal file
41
vendor/golang.org/x/text/internal/format/format.go
generated
vendored
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package format contains types for defining language-specific formatting of
|
||||
// values.
|
||||
//
|
||||
// This package is internal now, but will eventually be exposed after the API
|
||||
// settles.
|
||||
package format // import "golang.org/x/text/internal/format"
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
||||
// State represents the printer state passed to custom formatters. It provides
|
||||
// access to the fmt.State interface and the sentence and language-related
|
||||
// context.
|
||||
type State interface {
|
||||
fmt.State
|
||||
|
||||
// Language reports the requested language in which to render a message.
|
||||
Language() language.Tag
|
||||
|
||||
// TODO: consider this and removing rune from the Format method in the
|
||||
// Formatter interface.
|
||||
//
|
||||
// Verb returns the format variant to render, analogous to the types used
|
||||
// in fmt. Use 'v' for the default or only variant.
|
||||
// Verb() rune
|
||||
|
||||
// TODO: more info:
|
||||
// - sentence context such as linguistic features passed by the translator.
|
||||
}
|
||||
|
||||
// Formatter is analogous to fmt.Formatter.
|
||||
type Formatter interface {
|
||||
Format(state State, verb rune)
|
||||
}
|
||||
358
vendor/golang.org/x/text/internal/format/parser.go
generated
vendored
Normal file
358
vendor/golang.org/x/text/internal/format/parser.go
generated
vendored
Normal file
|
|
@ -0,0 +1,358 @@
|
|||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package format
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// A Parser parses a format string. The result from the parse are set in the
|
||||
// struct fields.
|
||||
type Parser struct {
|
||||
Verb rune
|
||||
|
||||
WidthPresent bool
|
||||
PrecPresent bool
|
||||
Minus bool
|
||||
Plus bool
|
||||
Sharp bool
|
||||
Space bool
|
||||
Zero bool
|
||||
|
||||
// For the formats %+v %#v, we set the plusV/sharpV flags
|
||||
// and clear the plus/sharp flags since %+v and %#v are in effect
|
||||
// different, flagless formats set at the top level.
|
||||
PlusV bool
|
||||
SharpV bool
|
||||
|
||||
HasIndex bool
|
||||
|
||||
Width int
|
||||
Prec int // precision
|
||||
|
||||
// retain arguments across calls.
|
||||
Args []interface{}
|
||||
// retain current argument number across calls
|
||||
ArgNum int
|
||||
|
||||
// reordered records whether the format string used argument reordering.
|
||||
Reordered bool
|
||||
// goodArgNum records whether the most recent reordering directive was valid.
|
||||
goodArgNum bool
|
||||
|
||||
// position info
|
||||
format string
|
||||
startPos int
|
||||
endPos int
|
||||
Status Status
|
||||
}
|
||||
|
||||
// Reset initializes a parser to scan format strings for the given args.
|
||||
func (p *Parser) Reset(args []interface{}) {
|
||||
p.Args = args
|
||||
p.ArgNum = 0
|
||||
p.startPos = 0
|
||||
p.Reordered = false
|
||||
}
|
||||
|
||||
// Text returns the part of the format string that was parsed by the last call
|
||||
// to Scan. It returns the original substitution clause if the current scan
|
||||
// parsed a substitution.
|
||||
func (p *Parser) Text() string { return p.format[p.startPos:p.endPos] }
|
||||
|
||||
// SetFormat sets a new format string to parse. It does not reset the argument
|
||||
// count.
|
||||
func (p *Parser) SetFormat(format string) {
|
||||
p.format = format
|
||||
p.startPos = 0
|
||||
p.endPos = 0
|
||||
}
|
||||
|
||||
// Status indicates the result type of a call to Scan.
|
||||
type Status int
|
||||
|
||||
const (
|
||||
StatusText Status = iota
|
||||
StatusSubstitution
|
||||
StatusBadWidthSubstitution
|
||||
StatusBadPrecSubstitution
|
||||
StatusNoVerb
|
||||
StatusBadArgNum
|
||||
StatusMissingArg
|
||||
)
|
||||
|
||||
// ClearFlags reset the parser to default behavior.
|
||||
func (p *Parser) ClearFlags() {
|
||||
p.WidthPresent = false
|
||||
p.PrecPresent = false
|
||||
p.Minus = false
|
||||
p.Plus = false
|
||||
p.Sharp = false
|
||||
p.Space = false
|
||||
p.Zero = false
|
||||
|
||||
p.PlusV = false
|
||||
p.SharpV = false
|
||||
|
||||
p.HasIndex = false
|
||||
}
|
||||
|
||||
// Scan scans the next part of the format string and sets the status to
|
||||
// indicate whether it scanned a string literal, substitution or error.
|
||||
func (p *Parser) Scan() bool {
|
||||
p.Status = StatusText
|
||||
format := p.format
|
||||
end := len(format)
|
||||
if p.endPos >= end {
|
||||
return false
|
||||
}
|
||||
afterIndex := false // previous item in format was an index like [3].
|
||||
|
||||
p.startPos = p.endPos
|
||||
p.goodArgNum = true
|
||||
i := p.startPos
|
||||
for i < end && format[i] != '%' {
|
||||
i++
|
||||
}
|
||||
if i > p.startPos {
|
||||
p.endPos = i
|
||||
return true
|
||||
}
|
||||
// Process one verb
|
||||
i++
|
||||
|
||||
p.Status = StatusSubstitution
|
||||
|
||||
// Do we have flags?
|
||||
p.ClearFlags()
|
||||
|
||||
simpleFormat:
|
||||
for ; i < end; i++ {
|
||||
c := p.format[i]
|
||||
switch c {
|
||||
case '#':
|
||||
p.Sharp = true
|
||||
case '0':
|
||||
p.Zero = !p.Minus // Only allow zero padding to the left.
|
||||
case '+':
|
||||
p.Plus = true
|
||||
case '-':
|
||||
p.Minus = true
|
||||
p.Zero = false // Do not pad with zeros to the right.
|
||||
case ' ':
|
||||
p.Space = true
|
||||
default:
|
||||
// Fast path for common case of ascii lower case simple verbs
|
||||
// without precision or width or argument indices.
|
||||
if 'a' <= c && c <= 'z' && p.ArgNum < len(p.Args) {
|
||||
if c == 'v' {
|
||||
// Go syntax
|
||||
p.SharpV = p.Sharp
|
||||
p.Sharp = false
|
||||
// Struct-field syntax
|
||||
p.PlusV = p.Plus
|
||||
p.Plus = false
|
||||
}
|
||||
p.Verb = rune(c)
|
||||
p.ArgNum++
|
||||
p.endPos = i + 1
|
||||
return true
|
||||
}
|
||||
// Format is more complex than simple flags and a verb or is malformed.
|
||||
break simpleFormat
|
||||
}
|
||||
}
|
||||
|
||||
// Do we have an explicit argument index?
|
||||
i, afterIndex = p.updateArgNumber(format, i)
|
||||
|
||||
// Do we have width?
|
||||
if i < end && format[i] == '*' {
|
||||
i++
|
||||
p.Width, p.WidthPresent = p.intFromArg()
|
||||
|
||||
if !p.WidthPresent {
|
||||
p.Status = StatusBadWidthSubstitution
|
||||
}
|
||||
|
||||
// We have a negative width, so take its value and ensure
|
||||
// that the minus flag is set
|
||||
if p.Width < 0 {
|
||||
p.Width = -p.Width
|
||||
p.Minus = true
|
||||
p.Zero = false // Do not pad with zeros to the right.
|
||||
}
|
||||
afterIndex = false
|
||||
} else {
|
||||
p.Width, p.WidthPresent, i = parsenum(format, i, end)
|
||||
if afterIndex && p.WidthPresent { // "%[3]2d"
|
||||
p.goodArgNum = false
|
||||
}
|
||||
}
|
||||
|
||||
// Do we have precision?
|
||||
if i+1 < end && format[i] == '.' {
|
||||
i++
|
||||
if afterIndex { // "%[3].2d"
|
||||
p.goodArgNum = false
|
||||
}
|
||||
i, afterIndex = p.updateArgNumber(format, i)
|
||||
if i < end && format[i] == '*' {
|
||||
i++
|
||||
p.Prec, p.PrecPresent = p.intFromArg()
|
||||
// Negative precision arguments don't make sense
|
||||
if p.Prec < 0 {
|
||||
p.Prec = 0
|
||||
p.PrecPresent = false
|
||||
}
|
||||
if !p.PrecPresent {
|
||||
p.Status = StatusBadPrecSubstitution
|
||||
}
|
||||
afterIndex = false
|
||||
} else {
|
||||
p.Prec, p.PrecPresent, i = parsenum(format, i, end)
|
||||
if !p.PrecPresent {
|
||||
p.Prec = 0
|
||||
p.PrecPresent = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !afterIndex {
|
||||
i, afterIndex = p.updateArgNumber(format, i)
|
||||
}
|
||||
p.HasIndex = afterIndex
|
||||
|
||||
if i >= end {
|
||||
p.endPos = i
|
||||
p.Status = StatusNoVerb
|
||||
return true
|
||||
}
|
||||
|
||||
verb, w := utf8.DecodeRuneInString(format[i:])
|
||||
p.endPos = i + w
|
||||
p.Verb = verb
|
||||
|
||||
switch {
|
||||
case verb == '%': // Percent does not absorb operands and ignores f.wid and f.prec.
|
||||
p.startPos = p.endPos - 1
|
||||
p.Status = StatusText
|
||||
case !p.goodArgNum:
|
||||
p.Status = StatusBadArgNum
|
||||
case p.ArgNum >= len(p.Args): // No argument left over to print for the current verb.
|
||||
p.Status = StatusMissingArg
|
||||
p.ArgNum++
|
||||
case verb == 'v':
|
||||
// Go syntax
|
||||
p.SharpV = p.Sharp
|
||||
p.Sharp = false
|
||||
// Struct-field syntax
|
||||
p.PlusV = p.Plus
|
||||
p.Plus = false
|
||||
fallthrough
|
||||
default:
|
||||
p.ArgNum++
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// intFromArg gets the ArgNumth element of Args. On return, isInt reports
|
||||
// whether the argument has integer type.
|
||||
func (p *Parser) intFromArg() (num int, isInt bool) {
|
||||
if p.ArgNum < len(p.Args) {
|
||||
arg := p.Args[p.ArgNum]
|
||||
num, isInt = arg.(int) // Almost always OK.
|
||||
if !isInt {
|
||||
// Work harder.
|
||||
switch v := reflect.ValueOf(arg); v.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
n := v.Int()
|
||||
if int64(int(n)) == n {
|
||||
num = int(n)
|
||||
isInt = true
|
||||
}
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
n := v.Uint()
|
||||
if int64(n) >= 0 && uint64(int(n)) == n {
|
||||
num = int(n)
|
||||
isInt = true
|
||||
}
|
||||
default:
|
||||
// Already 0, false.
|
||||
}
|
||||
}
|
||||
p.ArgNum++
|
||||
if tooLarge(num) {
|
||||
num = 0
|
||||
isInt = false
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// parseArgNumber returns the value of the bracketed number, minus 1
|
||||
// (explicit argument numbers are one-indexed but we want zero-indexed).
|
||||
// The opening bracket is known to be present at format[0].
|
||||
// The returned values are the index, the number of bytes to consume
|
||||
// up to the closing paren, if present, and whether the number parsed
|
||||
// ok. The bytes to consume will be 1 if no closing paren is present.
|
||||
func parseArgNumber(format string) (index int, wid int, ok bool) {
|
||||
// There must be at least 3 bytes: [n].
|
||||
if len(format) < 3 {
|
||||
return 0, 1, false
|
||||
}
|
||||
|
||||
// Find closing bracket.
|
||||
for i := 1; i < len(format); i++ {
|
||||
if format[i] == ']' {
|
||||
width, ok, newi := parsenum(format, 1, i)
|
||||
if !ok || newi != i {
|
||||
return 0, i + 1, false
|
||||
}
|
||||
return width - 1, i + 1, true // arg numbers are one-indexed and skip paren.
|
||||
}
|
||||
}
|
||||
return 0, 1, false
|
||||
}
|
||||
|
||||
// updateArgNumber returns the next argument to evaluate, which is either the value of the passed-in
|
||||
// argNum or the value of the bracketed integer that begins format[i:]. It also returns
|
||||
// the new value of i, that is, the index of the next byte of the format to process.
|
||||
func (p *Parser) updateArgNumber(format string, i int) (newi int, found bool) {
|
||||
if len(format) <= i || format[i] != '[' {
|
||||
return i, false
|
||||
}
|
||||
p.Reordered = true
|
||||
index, wid, ok := parseArgNumber(format[i:])
|
||||
if ok && 0 <= index && index < len(p.Args) {
|
||||
p.ArgNum = index
|
||||
return i + wid, true
|
||||
}
|
||||
p.goodArgNum = false
|
||||
return i + wid, ok
|
||||
}
|
||||
|
||||
// tooLarge reports whether the magnitude of the integer is
|
||||
// too large to be used as a formatting width or precision.
|
||||
func tooLarge(x int) bool {
|
||||
const max int = 1e6
|
||||
return x > max || x < -max
|
||||
}
|
||||
|
||||
// parsenum converts ASCII to integer. num is 0 (and isnum is false) if no number present.
|
||||
func parsenum(s string, start, end int) (num int, isnum bool, newi int) {
|
||||
if start >= end {
|
||||
return 0, false, end
|
||||
}
|
||||
for newi = start; newi < end && '0' <= s[newi] && s[newi] <= '9'; newi++ {
|
||||
if tooLarge(num) {
|
||||
return 0, false, end // Overflow; crazy long number most likely.
|
||||
}
|
||||
num = num*10 + int(s[newi]-'0')
|
||||
isnum = true
|
||||
}
|
||||
return
|
||||
}
|
||||
49
vendor/golang.org/x/text/internal/internal.go
generated
vendored
Normal file
49
vendor/golang.org/x/text/internal/internal.go
generated
vendored
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package internal contains non-exported functionality that are used by
|
||||
// packages in the text repository.
|
||||
package internal // import "golang.org/x/text/internal"
|
||||
|
||||
import (
|
||||
"sort"
|
||||
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
||||
// SortTags sorts tags in place.
|
||||
func SortTags(tags []language.Tag) {
|
||||
sort.Sort(sorter(tags))
|
||||
}
|
||||
|
||||
type sorter []language.Tag
|
||||
|
||||
func (s sorter) Len() int {
|
||||
return len(s)
|
||||
}
|
||||
|
||||
func (s sorter) Swap(i, j int) {
|
||||
s[i], s[j] = s[j], s[i]
|
||||
}
|
||||
|
||||
func (s sorter) Less(i, j int) bool {
|
||||
return s[i].String() < s[j].String()
|
||||
}
|
||||
|
||||
// UniqueTags sorts and filters duplicate tags in place and returns a slice with
|
||||
// only unique tags.
|
||||
func UniqueTags(tags []language.Tag) []language.Tag {
|
||||
if len(tags) <= 1 {
|
||||
return tags
|
||||
}
|
||||
SortTags(tags)
|
||||
k := 0
|
||||
for i := 1; i < len(tags); i++ {
|
||||
if tags[k].String() < tags[i].String() {
|
||||
k++
|
||||
tags[k] = tags[i]
|
||||
}
|
||||
}
|
||||
return tags[:k+1]
|
||||
}
|
||||
16
vendor/golang.org/x/text/internal/language/common.go
generated
vendored
Normal file
16
vendor/golang.org/x/text/internal/language/common.go
generated
vendored
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
|
||||
|
||||
package language
|
||||
|
||||
// This file contains code common to the maketables.go and the package code.
|
||||
|
||||
// AliasType is the type of an alias in AliasMap.
|
||||
type AliasType int8
|
||||
|
||||
const (
|
||||
Deprecated AliasType = iota
|
||||
Macro
|
||||
Legacy
|
||||
|
||||
AliasTypeUnknown AliasType = -1
|
||||
)
|
||||
29
vendor/golang.org/x/text/internal/language/compact.go
generated
vendored
Normal file
29
vendor/golang.org/x/text/internal/language/compact.go
generated
vendored
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package language
|
||||
|
||||
// CompactCoreInfo is a compact integer with the three core tags encoded.
|
||||
type CompactCoreInfo uint32
|
||||
|
||||
// GetCompactCore generates a uint32 value that is guaranteed to be unique for
|
||||
// different language, region, and script values.
|
||||
func GetCompactCore(t Tag) (cci CompactCoreInfo, ok bool) {
|
||||
if t.LangID > langNoIndexOffset {
|
||||
return 0, false
|
||||
}
|
||||
cci |= CompactCoreInfo(t.LangID) << (8 + 12)
|
||||
cci |= CompactCoreInfo(t.ScriptID) << 12
|
||||
cci |= CompactCoreInfo(t.RegionID)
|
||||
return cci, true
|
||||
}
|
||||
|
||||
// Tag generates a tag from c.
|
||||
func (c CompactCoreInfo) Tag() Tag {
|
||||
return Tag{
|
||||
LangID: Language(c >> 20),
|
||||
RegionID: Region(c & 0x3ff),
|
||||
ScriptID: Script(c>>12) & 0xff,
|
||||
}
|
||||
}
|
||||
61
vendor/golang.org/x/text/internal/language/compact/compact.go
generated
vendored
Normal file
61
vendor/golang.org/x/text/internal/language/compact/compact.go
generated
vendored
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package compact defines a compact representation of language tags.
|
||||
//
|
||||
// Common language tags (at least all for which locale information is defined
|
||||
// in CLDR) are assigned a unique index. Each Tag is associated with such an
|
||||
// ID for selecting language-related resources (such as translations) as well
|
||||
// as one for selecting regional defaults (currency, number formatting, etc.)
|
||||
//
|
||||
// It may want to export this functionality at some point, but at this point
|
||||
// this is only available for use within x/text.
|
||||
package compact // import "golang.org/x/text/internal/language/compact"
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/text/internal/language"
|
||||
)
|
||||
|
||||
// ID is an integer identifying a single tag.
|
||||
type ID uint16
|
||||
|
||||
func getCoreIndex(t language.Tag) (id ID, ok bool) {
|
||||
cci, ok := language.GetCompactCore(t)
|
||||
if !ok {
|
||||
return 0, false
|
||||
}
|
||||
i := sort.Search(len(coreTags), func(i int) bool {
|
||||
return cci <= coreTags[i]
|
||||
})
|
||||
if i == len(coreTags) || coreTags[i] != cci {
|
||||
return 0, false
|
||||
}
|
||||
return ID(i), true
|
||||
}
|
||||
|
||||
// Parent returns the ID of the parent or the root ID if id is already the root.
|
||||
func (id ID) Parent() ID {
|
||||
return parents[id]
|
||||
}
|
||||
|
||||
// Tag converts id to an internal language Tag.
|
||||
func (id ID) Tag() language.Tag {
|
||||
if int(id) >= len(coreTags) {
|
||||
return specialTags[int(id)-len(coreTags)]
|
||||
}
|
||||
return coreTags[id].Tag()
|
||||
}
|
||||
|
||||
var specialTags []language.Tag
|
||||
|
||||
func init() {
|
||||
tags := strings.Split(specialTagsStr, " ")
|
||||
specialTags = make([]language.Tag, len(tags))
|
||||
for i, t := range tags {
|
||||
specialTags[i] = language.MustParse(t)
|
||||
}
|
||||
}
|
||||
260
vendor/golang.org/x/text/internal/language/compact/language.go
generated
vendored
Normal file
260
vendor/golang.org/x/text/internal/language/compact/language.go
generated
vendored
Normal file
|
|
@ -0,0 +1,260 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:generate go run gen.go gen_index.go -output tables.go
|
||||
//go:generate go run gen_parents.go
|
||||
|
||||
package compact
|
||||
|
||||
// TODO: Remove above NOTE after:
|
||||
// - verifying that tables are dropped correctly (most notably matcher tables).
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"golang.org/x/text/internal/language"
|
||||
)
|
||||
|
||||
// Tag represents a BCP 47 language tag. It is used to specify an instance of a
|
||||
// specific language or locale. All language tag values are guaranteed to be
|
||||
// well-formed.
|
||||
type Tag struct {
|
||||
// NOTE: exported tags will become part of the public API.
|
||||
language ID
|
||||
locale ID
|
||||
full fullTag // always a language.Tag for now.
|
||||
}
|
||||
|
||||
const _und = 0
|
||||
|
||||
type fullTag interface {
|
||||
IsRoot() bool
|
||||
Parent() language.Tag
|
||||
}
|
||||
|
||||
// Make a compact Tag from a fully specified internal language Tag.
|
||||
func Make(t language.Tag) (tag Tag) {
|
||||
if region := t.TypeForKey("rg"); len(region) == 6 && region[2:] == "zzzz" {
|
||||
if r, err := language.ParseRegion(region[:2]); err == nil {
|
||||
tFull := t
|
||||
t, _ = t.SetTypeForKey("rg", "")
|
||||
// TODO: should we not consider "va" for the language tag?
|
||||
var exact1, exact2 bool
|
||||
tag.language, exact1 = FromTag(t)
|
||||
t.RegionID = r
|
||||
tag.locale, exact2 = FromTag(t)
|
||||
if !exact1 || !exact2 {
|
||||
tag.full = tFull
|
||||
}
|
||||
return tag
|
||||
}
|
||||
}
|
||||
lang, ok := FromTag(t)
|
||||
tag.language = lang
|
||||
tag.locale = lang
|
||||
if !ok {
|
||||
tag.full = t
|
||||
}
|
||||
return tag
|
||||
}
|
||||
|
||||
// Tag returns an internal language Tag version of this tag.
|
||||
func (t Tag) Tag() language.Tag {
|
||||
if t.full != nil {
|
||||
return t.full.(language.Tag)
|
||||
}
|
||||
tag := t.language.Tag()
|
||||
if t.language != t.locale {
|
||||
loc := t.locale.Tag()
|
||||
tag, _ = tag.SetTypeForKey("rg", strings.ToLower(loc.RegionID.String())+"zzzz")
|
||||
}
|
||||
return tag
|
||||
}
|
||||
|
||||
// IsCompact reports whether this tag is fully defined in terms of ID.
|
||||
func (t *Tag) IsCompact() bool {
|
||||
return t.full == nil
|
||||
}
|
||||
|
||||
// MayHaveVariants reports whether a tag may have variants. If it returns false
|
||||
// it is guaranteed the tag does not have variants.
|
||||
func (t Tag) MayHaveVariants() bool {
|
||||
return t.full != nil || int(t.language) >= len(coreTags)
|
||||
}
|
||||
|
||||
// MayHaveExtensions reports whether a tag may have extensions. If it returns
|
||||
// false it is guaranteed the tag does not have them.
|
||||
func (t Tag) MayHaveExtensions() bool {
|
||||
return t.full != nil ||
|
||||
int(t.language) >= len(coreTags) ||
|
||||
t.language != t.locale
|
||||
}
|
||||
|
||||
// IsRoot returns true if t is equal to language "und".
|
||||
func (t Tag) IsRoot() bool {
|
||||
if t.full != nil {
|
||||
return t.full.IsRoot()
|
||||
}
|
||||
return t.language == _und
|
||||
}
|
||||
|
||||
// Parent returns the CLDR parent of t. In CLDR, missing fields in data for a
|
||||
// specific language are substituted with fields from the parent language.
|
||||
// The parent for a language may change for newer versions of CLDR.
|
||||
func (t Tag) Parent() Tag {
|
||||
if t.full != nil {
|
||||
return Make(t.full.Parent())
|
||||
}
|
||||
if t.language != t.locale {
|
||||
// Simulate stripping -u-rg-xxxxxx
|
||||
return Tag{language: t.language, locale: t.language}
|
||||
}
|
||||
// TODO: use parent lookup table once cycle from internal package is
|
||||
// removed. Probably by internalizing the table and declaring this fast
|
||||
// enough.
|
||||
// lang := compactID(internal.Parent(uint16(t.language)))
|
||||
lang, _ := FromTag(t.language.Tag().Parent())
|
||||
return Tag{language: lang, locale: lang}
|
||||
}
|
||||
|
||||
// nextToken returns token t and the rest of the string.
|
||||
func nextToken(s string) (t, tail string) {
|
||||
p := strings.Index(s[1:], "-")
|
||||
if p == -1 {
|
||||
return s[1:], ""
|
||||
}
|
||||
p++
|
||||
return s[1:p], s[p:]
|
||||
}
|
||||
|
||||
// LanguageID returns an index, where 0 <= index < NumCompactTags, for tags
|
||||
// for which data exists in the text repository.The index will change over time
|
||||
// and should not be stored in persistent storage. If t does not match a compact
|
||||
// index, exact will be false and the compact index will be returned for the
|
||||
// first match after repeatedly taking the Parent of t.
|
||||
func LanguageID(t Tag) (id ID, exact bool) {
|
||||
return t.language, t.full == nil
|
||||
}
|
||||
|
||||
// RegionalID returns the ID for the regional variant of this tag. This index is
|
||||
// used to indicate region-specific overrides, such as default currency, default
|
||||
// calendar and week data, default time cycle, and default measurement system
|
||||
// and unit preferences.
|
||||
//
|
||||
// For instance, the tag en-GB-u-rg-uszzzz specifies British English with US
|
||||
// settings for currency, number formatting, etc. The CompactIndex for this tag
|
||||
// will be that for en-GB, while the RegionalID will be the one corresponding to
|
||||
// en-US.
|
||||
func RegionalID(t Tag) (id ID, exact bool) {
|
||||
return t.locale, t.full == nil
|
||||
}
|
||||
|
||||
// LanguageTag returns t stripped of regional variant indicators.
|
||||
//
|
||||
// At the moment this means it is stripped of a regional and variant subtag "rg"
|
||||
// and "va" in the "u" extension.
|
||||
func (t Tag) LanguageTag() Tag {
|
||||
if t.full == nil {
|
||||
return Tag{language: t.language, locale: t.language}
|
||||
}
|
||||
tt := t.Tag()
|
||||
tt.SetTypeForKey("rg", "")
|
||||
tt.SetTypeForKey("va", "")
|
||||
return Make(tt)
|
||||
}
|
||||
|
||||
// RegionalTag returns the regional variant of the tag.
|
||||
//
|
||||
// At the moment this means that the region is set from the regional subtag
|
||||
// "rg" in the "u" extension.
|
||||
func (t Tag) RegionalTag() Tag {
|
||||
rt := Tag{language: t.locale, locale: t.locale}
|
||||
if t.full == nil {
|
||||
return rt
|
||||
}
|
||||
b := language.Builder{}
|
||||
tag := t.Tag()
|
||||
// tag, _ = tag.SetTypeForKey("rg", "")
|
||||
b.SetTag(t.locale.Tag())
|
||||
if v := tag.Variants(); v != "" {
|
||||
for _, v := range strings.Split(v, "-") {
|
||||
b.AddVariant(v)
|
||||
}
|
||||
}
|
||||
for _, e := range tag.Extensions() {
|
||||
b.AddExt(e)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// FromTag reports closest matching ID for an internal language Tag.
|
||||
func FromTag(t language.Tag) (id ID, exact bool) {
|
||||
// TODO: perhaps give more frequent tags a lower index.
|
||||
// TODO: we could make the indexes stable. This will excluded some
|
||||
// possibilities for optimization, so don't do this quite yet.
|
||||
exact = true
|
||||
|
||||
b, s, r := t.Raw()
|
||||
if t.HasString() {
|
||||
if t.IsPrivateUse() {
|
||||
// We have no entries for user-defined tags.
|
||||
return 0, false
|
||||
}
|
||||
hasExtra := false
|
||||
if t.HasVariants() {
|
||||
if t.HasExtensions() {
|
||||
build := language.Builder{}
|
||||
build.SetTag(language.Tag{LangID: b, ScriptID: s, RegionID: r})
|
||||
build.AddVariant(t.Variants())
|
||||
exact = false
|
||||
t = build.Make()
|
||||
}
|
||||
hasExtra = true
|
||||
} else if _, ok := t.Extension('u'); ok {
|
||||
// TODO: va may mean something else. Consider not considering it.
|
||||
// Strip all but the 'va' entry.
|
||||
old := t
|
||||
variant := t.TypeForKey("va")
|
||||
t = language.Tag{LangID: b, ScriptID: s, RegionID: r}
|
||||
if variant != "" {
|
||||
t, _ = t.SetTypeForKey("va", variant)
|
||||
hasExtra = true
|
||||
}
|
||||
exact = old == t
|
||||
} else {
|
||||
exact = false
|
||||
}
|
||||
if hasExtra {
|
||||
// We have some variants.
|
||||
for i, s := range specialTags {
|
||||
if s == t {
|
||||
return ID(i + len(coreTags)), exact
|
||||
}
|
||||
}
|
||||
exact = false
|
||||
}
|
||||
}
|
||||
if x, ok := getCoreIndex(t); ok {
|
||||
return x, exact
|
||||
}
|
||||
exact = false
|
||||
if r != 0 && s == 0 {
|
||||
// Deal with cases where an extra script is inserted for the region.
|
||||
t, _ := t.Maximize()
|
||||
if x, ok := getCoreIndex(t); ok {
|
||||
return x, exact
|
||||
}
|
||||
}
|
||||
for t = t.Parent(); t != root; t = t.Parent() {
|
||||
// No variants specified: just compare core components.
|
||||
// The key has the form lllssrrr, where l, s, and r are nibbles for
|
||||
// respectively the langID, scriptID, and regionID.
|
||||
if x, ok := getCoreIndex(t); ok {
|
||||
return x, exact
|
||||
}
|
||||
}
|
||||
return 0, exact
|
||||
}
|
||||
|
||||
var root = language.Tag{}
|
||||
120
vendor/golang.org/x/text/internal/language/compact/parents.go
generated
vendored
Normal file
120
vendor/golang.org/x/text/internal/language/compact/parents.go
generated
vendored
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
|
||||
|
||||
package compact
|
||||
|
||||
// parents maps a compact index of a tag to the compact index of the parent of
|
||||
// this tag.
|
||||
var parents = []ID{ // 775 elements
|
||||
// Entry 0 - 3F
|
||||
0x0000, 0x0000, 0x0001, 0x0001, 0x0000, 0x0004, 0x0000, 0x0006,
|
||||
0x0000, 0x0008, 0x0000, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a,
|
||||
0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a,
|
||||
0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a,
|
||||
0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x000a, 0x0000,
|
||||
0x0000, 0x0028, 0x0000, 0x002a, 0x0000, 0x002c, 0x0000, 0x0000,
|
||||
0x002f, 0x002e, 0x002e, 0x0000, 0x0033, 0x0000, 0x0035, 0x0000,
|
||||
0x0037, 0x0000, 0x0039, 0x0000, 0x003b, 0x0000, 0x0000, 0x003e,
|
||||
// Entry 40 - 7F
|
||||
0x0000, 0x0040, 0x0040, 0x0000, 0x0043, 0x0043, 0x0000, 0x0046,
|
||||
0x0000, 0x0048, 0x0000, 0x0000, 0x004b, 0x004a, 0x004a, 0x0000,
|
||||
0x004f, 0x004f, 0x004f, 0x004f, 0x0000, 0x0054, 0x0054, 0x0000,
|
||||
0x0057, 0x0000, 0x0059, 0x0000, 0x005b, 0x0000, 0x005d, 0x005d,
|
||||
0x0000, 0x0060, 0x0000, 0x0062, 0x0000, 0x0064, 0x0000, 0x0066,
|
||||
0x0066, 0x0000, 0x0069, 0x0000, 0x006b, 0x006b, 0x006b, 0x006b,
|
||||
0x006b, 0x006b, 0x006b, 0x0000, 0x0073, 0x0000, 0x0075, 0x0000,
|
||||
0x0077, 0x0000, 0x0000, 0x007a, 0x0000, 0x007c, 0x0000, 0x007e,
|
||||
// Entry 80 - BF
|
||||
0x0000, 0x0080, 0x0080, 0x0000, 0x0083, 0x0083, 0x0000, 0x0086,
|
||||
0x0087, 0x0087, 0x0087, 0x0086, 0x0088, 0x0087, 0x0087, 0x0087,
|
||||
0x0086, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0088,
|
||||
0x0087, 0x0087, 0x0087, 0x0087, 0x0088, 0x0087, 0x0088, 0x0087,
|
||||
0x0087, 0x0088, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
|
||||
0x0087, 0x0087, 0x0087, 0x0086, 0x0087, 0x0087, 0x0087, 0x0087,
|
||||
0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
|
||||
0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0086, 0x0087, 0x0086,
|
||||
// Entry C0 - FF
|
||||
0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
|
||||
0x0088, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
|
||||
0x0086, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0088, 0x0087,
|
||||
0x0087, 0x0088, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087,
|
||||
0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0086, 0x0086, 0x0087,
|
||||
0x0087, 0x0086, 0x0087, 0x0087, 0x0087, 0x0087, 0x0087, 0x0000,
|
||||
0x00ef, 0x0000, 0x00f1, 0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f2,
|
||||
0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f1, 0x00f2, 0x00f1, 0x00f1,
|
||||
// Entry 100 - 13F
|
||||
0x00f2, 0x00f2, 0x00f1, 0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f1,
|
||||
0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x00f2, 0x0000, 0x010e,
|
||||
0x0000, 0x0110, 0x0000, 0x0112, 0x0000, 0x0114, 0x0114, 0x0000,
|
||||
0x0117, 0x0117, 0x0117, 0x0117, 0x0000, 0x011c, 0x0000, 0x011e,
|
||||
0x0000, 0x0120, 0x0120, 0x0000, 0x0123, 0x0123, 0x0123, 0x0123,
|
||||
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
|
||||
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
|
||||
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
|
||||
// Entry 140 - 17F
|
||||
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
|
||||
0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123, 0x0123,
|
||||
0x0123, 0x0123, 0x0000, 0x0152, 0x0000, 0x0154, 0x0000, 0x0156,
|
||||
0x0000, 0x0158, 0x0000, 0x015a, 0x0000, 0x015c, 0x015c, 0x015c,
|
||||
0x0000, 0x0160, 0x0000, 0x0000, 0x0163, 0x0000, 0x0165, 0x0000,
|
||||
0x0167, 0x0167, 0x0167, 0x0000, 0x016b, 0x0000, 0x016d, 0x0000,
|
||||
0x016f, 0x0000, 0x0171, 0x0171, 0x0000, 0x0174, 0x0000, 0x0176,
|
||||
0x0000, 0x0178, 0x0000, 0x017a, 0x0000, 0x017c, 0x0000, 0x017e,
|
||||
// Entry 180 - 1BF
|
||||
0x0000, 0x0000, 0x0000, 0x0182, 0x0000, 0x0184, 0x0184, 0x0184,
|
||||
0x0184, 0x0000, 0x0000, 0x0000, 0x018b, 0x0000, 0x0000, 0x018e,
|
||||
0x0000, 0x0000, 0x0191, 0x0000, 0x0000, 0x0000, 0x0195, 0x0000,
|
||||
0x0197, 0x0000, 0x0000, 0x019a, 0x0000, 0x0000, 0x019d, 0x0000,
|
||||
0x019f, 0x0000, 0x01a1, 0x0000, 0x01a3, 0x0000, 0x01a5, 0x0000,
|
||||
0x01a7, 0x0000, 0x01a9, 0x0000, 0x01ab, 0x0000, 0x01ad, 0x0000,
|
||||
0x01af, 0x0000, 0x01b1, 0x01b1, 0x0000, 0x01b4, 0x0000, 0x01b6,
|
||||
0x0000, 0x01b8, 0x0000, 0x01ba, 0x0000, 0x01bc, 0x0000, 0x0000,
|
||||
// Entry 1C0 - 1FF
|
||||
0x01bf, 0x0000, 0x01c1, 0x0000, 0x01c3, 0x0000, 0x01c5, 0x0000,
|
||||
0x01c7, 0x0000, 0x01c9, 0x0000, 0x01cb, 0x01cb, 0x01cb, 0x01cb,
|
||||
0x0000, 0x01d0, 0x0000, 0x01d2, 0x01d2, 0x0000, 0x01d5, 0x0000,
|
||||
0x01d7, 0x0000, 0x01d9, 0x0000, 0x01db, 0x0000, 0x01dd, 0x0000,
|
||||
0x01df, 0x01df, 0x0000, 0x01e2, 0x0000, 0x01e4, 0x0000, 0x01e6,
|
||||
0x0000, 0x01e8, 0x0000, 0x01ea, 0x0000, 0x01ec, 0x0000, 0x01ee,
|
||||
0x0000, 0x01f0, 0x0000, 0x0000, 0x01f3, 0x0000, 0x01f5, 0x01f5,
|
||||
0x01f5, 0x0000, 0x01f9, 0x0000, 0x01fb, 0x0000, 0x01fd, 0x0000,
|
||||
// Entry 200 - 23F
|
||||
0x01ff, 0x0000, 0x0000, 0x0202, 0x0000, 0x0204, 0x0204, 0x0000,
|
||||
0x0207, 0x0000, 0x0209, 0x0209, 0x0000, 0x020c, 0x020c, 0x0000,
|
||||
0x020f, 0x020f, 0x020f, 0x020f, 0x020f, 0x020f, 0x020f, 0x0000,
|
||||
0x0217, 0x0000, 0x0219, 0x0000, 0x021b, 0x0000, 0x0000, 0x0000,
|
||||
0x0000, 0x0000, 0x0221, 0x0000, 0x0000, 0x0224, 0x0000, 0x0226,
|
||||
0x0226, 0x0000, 0x0229, 0x0000, 0x022b, 0x022b, 0x0000, 0x0000,
|
||||
0x022f, 0x022e, 0x022e, 0x0000, 0x0000, 0x0234, 0x0000, 0x0236,
|
||||
0x0000, 0x0238, 0x0000, 0x0244, 0x023a, 0x0244, 0x0244, 0x0244,
|
||||
// Entry 240 - 27F
|
||||
0x0244, 0x0244, 0x0244, 0x0244, 0x023a, 0x0244, 0x0244, 0x0000,
|
||||
0x0247, 0x0247, 0x0247, 0x0000, 0x024b, 0x0000, 0x024d, 0x0000,
|
||||
0x024f, 0x024f, 0x0000, 0x0252, 0x0000, 0x0254, 0x0254, 0x0254,
|
||||
0x0254, 0x0254, 0x0254, 0x0000, 0x025b, 0x0000, 0x025d, 0x0000,
|
||||
0x025f, 0x0000, 0x0261, 0x0000, 0x0263, 0x0000, 0x0265, 0x0000,
|
||||
0x0000, 0x0268, 0x0268, 0x0268, 0x0000, 0x026c, 0x0000, 0x026e,
|
||||
0x0000, 0x0270, 0x0000, 0x0000, 0x0000, 0x0274, 0x0273, 0x0273,
|
||||
0x0000, 0x0278, 0x0000, 0x027a, 0x0000, 0x027c, 0x0000, 0x0000,
|
||||
// Entry 280 - 2BF
|
||||
0x0000, 0x0000, 0x0281, 0x0000, 0x0000, 0x0284, 0x0000, 0x0286,
|
||||
0x0286, 0x0286, 0x0286, 0x0000, 0x028b, 0x028b, 0x028b, 0x0000,
|
||||
0x028f, 0x028f, 0x028f, 0x028f, 0x028f, 0x0000, 0x0295, 0x0295,
|
||||
0x0295, 0x0295, 0x0000, 0x0000, 0x0000, 0x0000, 0x029d, 0x029d,
|
||||
0x029d, 0x0000, 0x02a1, 0x02a1, 0x02a1, 0x02a1, 0x0000, 0x0000,
|
||||
0x02a7, 0x02a7, 0x02a7, 0x02a7, 0x0000, 0x02ac, 0x0000, 0x02ae,
|
||||
0x02ae, 0x0000, 0x02b1, 0x0000, 0x02b3, 0x0000, 0x02b5, 0x02b5,
|
||||
0x0000, 0x0000, 0x02b9, 0x0000, 0x0000, 0x0000, 0x02bd, 0x0000,
|
||||
// Entry 2C0 - 2FF
|
||||
0x02bf, 0x02bf, 0x0000, 0x0000, 0x02c3, 0x0000, 0x02c5, 0x0000,
|
||||
0x02c7, 0x0000, 0x02c9, 0x0000, 0x02cb, 0x0000, 0x02cd, 0x02cd,
|
||||
0x0000, 0x0000, 0x02d1, 0x0000, 0x02d3, 0x02d0, 0x02d0, 0x0000,
|
||||
0x0000, 0x02d8, 0x02d7, 0x02d7, 0x0000, 0x0000, 0x02dd, 0x0000,
|
||||
0x02df, 0x0000, 0x02e1, 0x0000, 0x0000, 0x02e4, 0x0000, 0x02e6,
|
||||
0x0000, 0x0000, 0x02e9, 0x0000, 0x02eb, 0x0000, 0x02ed, 0x0000,
|
||||
0x02ef, 0x02ef, 0x0000, 0x0000, 0x02f3, 0x02f2, 0x02f2, 0x0000,
|
||||
0x02f7, 0x0000, 0x02f9, 0x02f9, 0x02f9, 0x02f9, 0x02f9, 0x0000,
|
||||
// Entry 300 - 33F
|
||||
0x02ff, 0x0300, 0x02ff, 0x0000, 0x0303, 0x0051, 0x00e6,
|
||||
} // Size: 1574 bytes
|
||||
|
||||
// Total table size 1574 bytes (1KiB); checksum: 895AAF0B
|
||||
1015
vendor/golang.org/x/text/internal/language/compact/tables.go
generated
vendored
Normal file
1015
vendor/golang.org/x/text/internal/language/compact/tables.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
91
vendor/golang.org/x/text/internal/language/compact/tags.go
generated
vendored
Normal file
91
vendor/golang.org/x/text/internal/language/compact/tags.go
generated
vendored
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package compact
|
||||
|
||||
var (
|
||||
und = Tag{}
|
||||
|
||||
Und Tag = Tag{}
|
||||
|
||||
Afrikaans Tag = Tag{language: afIndex, locale: afIndex}
|
||||
Amharic Tag = Tag{language: amIndex, locale: amIndex}
|
||||
Arabic Tag = Tag{language: arIndex, locale: arIndex}
|
||||
ModernStandardArabic Tag = Tag{language: ar001Index, locale: ar001Index}
|
||||
Azerbaijani Tag = Tag{language: azIndex, locale: azIndex}
|
||||
Bulgarian Tag = Tag{language: bgIndex, locale: bgIndex}
|
||||
Bengali Tag = Tag{language: bnIndex, locale: bnIndex}
|
||||
Catalan Tag = Tag{language: caIndex, locale: caIndex}
|
||||
Czech Tag = Tag{language: csIndex, locale: csIndex}
|
||||
Danish Tag = Tag{language: daIndex, locale: daIndex}
|
||||
German Tag = Tag{language: deIndex, locale: deIndex}
|
||||
Greek Tag = Tag{language: elIndex, locale: elIndex}
|
||||
English Tag = Tag{language: enIndex, locale: enIndex}
|
||||
AmericanEnglish Tag = Tag{language: enUSIndex, locale: enUSIndex}
|
||||
BritishEnglish Tag = Tag{language: enGBIndex, locale: enGBIndex}
|
||||
Spanish Tag = Tag{language: esIndex, locale: esIndex}
|
||||
EuropeanSpanish Tag = Tag{language: esESIndex, locale: esESIndex}
|
||||
LatinAmericanSpanish Tag = Tag{language: es419Index, locale: es419Index}
|
||||
Estonian Tag = Tag{language: etIndex, locale: etIndex}
|
||||
Persian Tag = Tag{language: faIndex, locale: faIndex}
|
||||
Finnish Tag = Tag{language: fiIndex, locale: fiIndex}
|
||||
Filipino Tag = Tag{language: filIndex, locale: filIndex}
|
||||
French Tag = Tag{language: frIndex, locale: frIndex}
|
||||
CanadianFrench Tag = Tag{language: frCAIndex, locale: frCAIndex}
|
||||
Gujarati Tag = Tag{language: guIndex, locale: guIndex}
|
||||
Hebrew Tag = Tag{language: heIndex, locale: heIndex}
|
||||
Hindi Tag = Tag{language: hiIndex, locale: hiIndex}
|
||||
Croatian Tag = Tag{language: hrIndex, locale: hrIndex}
|
||||
Hungarian Tag = Tag{language: huIndex, locale: huIndex}
|
||||
Armenian Tag = Tag{language: hyIndex, locale: hyIndex}
|
||||
Indonesian Tag = Tag{language: idIndex, locale: idIndex}
|
||||
Icelandic Tag = Tag{language: isIndex, locale: isIndex}
|
||||
Italian Tag = Tag{language: itIndex, locale: itIndex}
|
||||
Japanese Tag = Tag{language: jaIndex, locale: jaIndex}
|
||||
Georgian Tag = Tag{language: kaIndex, locale: kaIndex}
|
||||
Kazakh Tag = Tag{language: kkIndex, locale: kkIndex}
|
||||
Khmer Tag = Tag{language: kmIndex, locale: kmIndex}
|
||||
Kannada Tag = Tag{language: knIndex, locale: knIndex}
|
||||
Korean Tag = Tag{language: koIndex, locale: koIndex}
|
||||
Kirghiz Tag = Tag{language: kyIndex, locale: kyIndex}
|
||||
Lao Tag = Tag{language: loIndex, locale: loIndex}
|
||||
Lithuanian Tag = Tag{language: ltIndex, locale: ltIndex}
|
||||
Latvian Tag = Tag{language: lvIndex, locale: lvIndex}
|
||||
Macedonian Tag = Tag{language: mkIndex, locale: mkIndex}
|
||||
Malayalam Tag = Tag{language: mlIndex, locale: mlIndex}
|
||||
Mongolian Tag = Tag{language: mnIndex, locale: mnIndex}
|
||||
Marathi Tag = Tag{language: mrIndex, locale: mrIndex}
|
||||
Malay Tag = Tag{language: msIndex, locale: msIndex}
|
||||
Burmese Tag = Tag{language: myIndex, locale: myIndex}
|
||||
Nepali Tag = Tag{language: neIndex, locale: neIndex}
|
||||
Dutch Tag = Tag{language: nlIndex, locale: nlIndex}
|
||||
Norwegian Tag = Tag{language: noIndex, locale: noIndex}
|
||||
Punjabi Tag = Tag{language: paIndex, locale: paIndex}
|
||||
Polish Tag = Tag{language: plIndex, locale: plIndex}
|
||||
Portuguese Tag = Tag{language: ptIndex, locale: ptIndex}
|
||||
BrazilianPortuguese Tag = Tag{language: ptBRIndex, locale: ptBRIndex}
|
||||
EuropeanPortuguese Tag = Tag{language: ptPTIndex, locale: ptPTIndex}
|
||||
Romanian Tag = Tag{language: roIndex, locale: roIndex}
|
||||
Russian Tag = Tag{language: ruIndex, locale: ruIndex}
|
||||
Sinhala Tag = Tag{language: siIndex, locale: siIndex}
|
||||
Slovak Tag = Tag{language: skIndex, locale: skIndex}
|
||||
Slovenian Tag = Tag{language: slIndex, locale: slIndex}
|
||||
Albanian Tag = Tag{language: sqIndex, locale: sqIndex}
|
||||
Serbian Tag = Tag{language: srIndex, locale: srIndex}
|
||||
SerbianLatin Tag = Tag{language: srLatnIndex, locale: srLatnIndex}
|
||||
Swedish Tag = Tag{language: svIndex, locale: svIndex}
|
||||
Swahili Tag = Tag{language: swIndex, locale: swIndex}
|
||||
Tamil Tag = Tag{language: taIndex, locale: taIndex}
|
||||
Telugu Tag = Tag{language: teIndex, locale: teIndex}
|
||||
Thai Tag = Tag{language: thIndex, locale: thIndex}
|
||||
Turkish Tag = Tag{language: trIndex, locale: trIndex}
|
||||
Ukrainian Tag = Tag{language: ukIndex, locale: ukIndex}
|
||||
Urdu Tag = Tag{language: urIndex, locale: urIndex}
|
||||
Uzbek Tag = Tag{language: uzIndex, locale: uzIndex}
|
||||
Vietnamese Tag = Tag{language: viIndex, locale: viIndex}
|
||||
Chinese Tag = Tag{language: zhIndex, locale: zhIndex}
|
||||
SimplifiedChinese Tag = Tag{language: zhHansIndex, locale: zhHansIndex}
|
||||
TraditionalChinese Tag = Tag{language: zhHantIndex, locale: zhHantIndex}
|
||||
Zulu Tag = Tag{language: zuIndex, locale: zuIndex}
|
||||
)
|
||||
167
vendor/golang.org/x/text/internal/language/compose.go
generated
vendored
Normal file
167
vendor/golang.org/x/text/internal/language/compose.go
generated
vendored
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package language
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Builder allows constructing a Tag from individual components.
|
||||
// Its main user is Compose in the top-level language package.
|
||||
type Builder struct {
|
||||
Tag Tag
|
||||
|
||||
private string // the x extension
|
||||
variants []string
|
||||
extensions []string
|
||||
}
|
||||
|
||||
// Make returns a new Tag from the current settings.
|
||||
func (b *Builder) Make() Tag {
|
||||
t := b.Tag
|
||||
|
||||
if len(b.extensions) > 0 || len(b.variants) > 0 {
|
||||
sort.Sort(sortVariants(b.variants))
|
||||
sort.Strings(b.extensions)
|
||||
|
||||
if b.private != "" {
|
||||
b.extensions = append(b.extensions, b.private)
|
||||
}
|
||||
n := maxCoreSize + tokenLen(b.variants...) + tokenLen(b.extensions...)
|
||||
buf := make([]byte, n)
|
||||
p := t.genCoreBytes(buf)
|
||||
t.pVariant = byte(p)
|
||||
p += appendTokens(buf[p:], b.variants...)
|
||||
t.pExt = uint16(p)
|
||||
p += appendTokens(buf[p:], b.extensions...)
|
||||
t.str = string(buf[:p])
|
||||
// We may not always need to remake the string, but when or when not
|
||||
// to do so is rather tricky.
|
||||
scan := makeScanner(buf[:p])
|
||||
t, _ = parse(&scan, "")
|
||||
return t
|
||||
|
||||
} else if b.private != "" {
|
||||
t.str = b.private
|
||||
t.RemakeString()
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// SetTag copies all the settings from a given Tag. Any previously set values
|
||||
// are discarded.
|
||||
func (b *Builder) SetTag(t Tag) {
|
||||
b.Tag.LangID = t.LangID
|
||||
b.Tag.RegionID = t.RegionID
|
||||
b.Tag.ScriptID = t.ScriptID
|
||||
// TODO: optimize
|
||||
b.variants = b.variants[:0]
|
||||
if variants := t.Variants(); variants != "" {
|
||||
for _, vr := range strings.Split(variants[1:], "-") {
|
||||
b.variants = append(b.variants, vr)
|
||||
}
|
||||
}
|
||||
b.extensions, b.private = b.extensions[:0], ""
|
||||
for _, e := range t.Extensions() {
|
||||
b.AddExt(e)
|
||||
}
|
||||
}
|
||||
|
||||
// AddExt adds extension e to the tag. e must be a valid extension as returned
|
||||
// by Tag.Extension. If the extension already exists, it will be discarded,
|
||||
// except for a -u extension, where non-existing key-type pairs will added.
|
||||
func (b *Builder) AddExt(e string) {
|
||||
if e[0] == 'x' {
|
||||
if b.private == "" {
|
||||
b.private = e
|
||||
}
|
||||
return
|
||||
}
|
||||
for i, s := range b.extensions {
|
||||
if s[0] == e[0] {
|
||||
if e[0] == 'u' {
|
||||
b.extensions[i] += e[1:]
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
b.extensions = append(b.extensions, e)
|
||||
}
|
||||
|
||||
// SetExt sets the extension e to the tag. e must be a valid extension as
|
||||
// returned by Tag.Extension. If the extension already exists, it will be
|
||||
// overwritten, except for a -u extension, where the individual key-type pairs
|
||||
// will be set.
|
||||
func (b *Builder) SetExt(e string) {
|
||||
if e[0] == 'x' {
|
||||
b.private = e
|
||||
return
|
||||
}
|
||||
for i, s := range b.extensions {
|
||||
if s[0] == e[0] {
|
||||
if e[0] == 'u' {
|
||||
b.extensions[i] = e + s[1:]
|
||||
} else {
|
||||
b.extensions[i] = e
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
b.extensions = append(b.extensions, e)
|
||||
}
|
||||
|
||||
// AddVariant adds any number of variants.
|
||||
func (b *Builder) AddVariant(v ...string) {
|
||||
for _, v := range v {
|
||||
if v != "" {
|
||||
b.variants = append(b.variants, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ClearVariants removes any variants previously added, including those
|
||||
// copied from a Tag in SetTag.
|
||||
func (b *Builder) ClearVariants() {
|
||||
b.variants = b.variants[:0]
|
||||
}
|
||||
|
||||
// ClearExtensions removes any extensions previously added, including those
|
||||
// copied from a Tag in SetTag.
|
||||
func (b *Builder) ClearExtensions() {
|
||||
b.private = ""
|
||||
b.extensions = b.extensions[:0]
|
||||
}
|
||||
|
||||
func tokenLen(token ...string) (n int) {
|
||||
for _, t := range token {
|
||||
n += len(t) + 1
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func appendTokens(b []byte, token ...string) int {
|
||||
p := 0
|
||||
for _, t := range token {
|
||||
b[p] = '-'
|
||||
copy(b[p+1:], t)
|
||||
p += 1 + len(t)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
type sortVariants []string
|
||||
|
||||
func (s sortVariants) Len() int {
|
||||
return len(s)
|
||||
}
|
||||
|
||||
func (s sortVariants) Swap(i, j int) {
|
||||
s[j], s[i] = s[i], s[j]
|
||||
}
|
||||
|
||||
func (s sortVariants) Less(i, j int) bool {
|
||||
return variantIndex[s[i]] < variantIndex[s[j]]
|
||||
}
|
||||
28
vendor/golang.org/x/text/internal/language/coverage.go
generated
vendored
Normal file
28
vendor/golang.org/x/text/internal/language/coverage.go
generated
vendored
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
// Copyright 2014 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package language
|
||||
|
||||
// BaseLanguages returns the list of all supported base languages. It generates
|
||||
// the list by traversing the internal structures.
|
||||
func BaseLanguages() []Language {
|
||||
base := make([]Language, 0, NumLanguages)
|
||||
for i := 0; i < langNoIndexOffset; i++ {
|
||||
// We included "und" already for the value 0.
|
||||
if i != nonCanonicalUnd {
|
||||
base = append(base, Language(i))
|
||||
}
|
||||
}
|
||||
i := langNoIndexOffset
|
||||
for _, v := range langNoIndex {
|
||||
for k := 0; k < 8; k++ {
|
||||
if v&1 == 1 {
|
||||
base = append(base, Language(i))
|
||||
}
|
||||
v >>= 1
|
||||
i++
|
||||
}
|
||||
}
|
||||
return base
|
||||
}
|
||||
627
vendor/golang.org/x/text/internal/language/language.go
generated
vendored
Normal file
627
vendor/golang.org/x/text/internal/language/language.go
generated
vendored
Normal file
|
|
@ -0,0 +1,627 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:generate go run gen.go gen_common.go -output tables.go
|
||||
|
||||
package language // import "golang.org/x/text/internal/language"
|
||||
|
||||
// TODO: Remove above NOTE after:
|
||||
// - verifying that tables are dropped correctly (most notably matcher tables).
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const (
|
||||
// maxCoreSize is the maximum size of a BCP 47 tag without variants and
|
||||
// extensions. Equals max lang (3) + script (4) + max reg (3) + 2 dashes.
|
||||
maxCoreSize = 12
|
||||
|
||||
// max99thPercentileSize is a somewhat arbitrary buffer size that presumably
|
||||
// is large enough to hold at least 99% of the BCP 47 tags.
|
||||
max99thPercentileSize = 32
|
||||
|
||||
// maxSimpleUExtensionSize is the maximum size of a -u extension with one
|
||||
// key-type pair. Equals len("-u-") + key (2) + dash + max value (8).
|
||||
maxSimpleUExtensionSize = 14
|
||||
)
|
||||
|
||||
// Tag represents a BCP 47 language tag. It is used to specify an instance of a
|
||||
// specific language or locale. All language tag values are guaranteed to be
|
||||
// well-formed. The zero value of Tag is Und.
|
||||
type Tag struct {
|
||||
// TODO: the following fields have the form TagTypeID. This name is chosen
|
||||
// to allow refactoring the public package without conflicting with its
|
||||
// Base, Script, and Region methods. Once the transition is fully completed
|
||||
// the ID can be stripped from the name.
|
||||
|
||||
LangID Language
|
||||
RegionID Region
|
||||
// TODO: we will soon run out of positions for ScriptID. Idea: instead of
|
||||
// storing lang, region, and ScriptID codes, store only the compact index and
|
||||
// have a lookup table from this code to its expansion. This greatly speeds
|
||||
// up table lookup, speed up common variant cases.
|
||||
// This will also immediately free up 3 extra bytes. Also, the pVariant
|
||||
// field can now be moved to the lookup table, as the compact index uniquely
|
||||
// determines the offset of a possible variant.
|
||||
ScriptID Script
|
||||
pVariant byte // offset in str, includes preceding '-'
|
||||
pExt uint16 // offset of first extension, includes preceding '-'
|
||||
|
||||
// str is the string representation of the Tag. It will only be used if the
|
||||
// tag has variants or extensions.
|
||||
str string
|
||||
}
|
||||
|
||||
// Make is a convenience wrapper for Parse that omits the error.
|
||||
// In case of an error, a sensible default is returned.
|
||||
func Make(s string) Tag {
|
||||
t, _ := Parse(s)
|
||||
return t
|
||||
}
|
||||
|
||||
// Raw returns the raw base language, script and region, without making an
|
||||
// attempt to infer their values.
|
||||
// TODO: consider removing
|
||||
func (t Tag) Raw() (b Language, s Script, r Region) {
|
||||
return t.LangID, t.ScriptID, t.RegionID
|
||||
}
|
||||
|
||||
// equalTags compares language, script and region subtags only.
|
||||
func (t Tag) equalTags(a Tag) bool {
|
||||
return t.LangID == a.LangID && t.ScriptID == a.ScriptID && t.RegionID == a.RegionID
|
||||
}
|
||||
|
||||
// IsRoot returns true if t is equal to language "und".
|
||||
func (t Tag) IsRoot() bool {
|
||||
if int(t.pVariant) < len(t.str) {
|
||||
return false
|
||||
}
|
||||
return t.equalTags(Und)
|
||||
}
|
||||
|
||||
// IsPrivateUse reports whether the Tag consists solely of an IsPrivateUse use
|
||||
// tag.
|
||||
func (t Tag) IsPrivateUse() bool {
|
||||
return t.str != "" && t.pVariant == 0
|
||||
}
|
||||
|
||||
// RemakeString is used to update t.str in case lang, script or region changed.
|
||||
// It is assumed that pExt and pVariant still point to the start of the
|
||||
// respective parts.
|
||||
func (t *Tag) RemakeString() {
|
||||
if t.str == "" {
|
||||
return
|
||||
}
|
||||
extra := t.str[t.pVariant:]
|
||||
if t.pVariant > 0 {
|
||||
extra = extra[1:]
|
||||
}
|
||||
if t.equalTags(Und) && strings.HasPrefix(extra, "x-") {
|
||||
t.str = extra
|
||||
t.pVariant = 0
|
||||
t.pExt = 0
|
||||
return
|
||||
}
|
||||
var buf [max99thPercentileSize]byte // avoid extra memory allocation in most cases.
|
||||
b := buf[:t.genCoreBytes(buf[:])]
|
||||
if extra != "" {
|
||||
diff := len(b) - int(t.pVariant)
|
||||
b = append(b, '-')
|
||||
b = append(b, extra...)
|
||||
t.pVariant = uint8(int(t.pVariant) + diff)
|
||||
t.pExt = uint16(int(t.pExt) + diff)
|
||||
} else {
|
||||
t.pVariant = uint8(len(b))
|
||||
t.pExt = uint16(len(b))
|
||||
}
|
||||
t.str = string(b)
|
||||
}
|
||||
|
||||
// genCoreBytes writes a string for the base languages, script and region tags
|
||||
// to the given buffer and returns the number of bytes written. It will never
|
||||
// write more than maxCoreSize bytes.
|
||||
func (t *Tag) genCoreBytes(buf []byte) int {
|
||||
n := t.LangID.StringToBuf(buf[:])
|
||||
if t.ScriptID != 0 {
|
||||
n += copy(buf[n:], "-")
|
||||
n += copy(buf[n:], t.ScriptID.String())
|
||||
}
|
||||
if t.RegionID != 0 {
|
||||
n += copy(buf[n:], "-")
|
||||
n += copy(buf[n:], t.RegionID.String())
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// String returns the canonical string representation of the language tag.
|
||||
func (t Tag) String() string {
|
||||
if t.str != "" {
|
||||
return t.str
|
||||
}
|
||||
if t.ScriptID == 0 && t.RegionID == 0 {
|
||||
return t.LangID.String()
|
||||
}
|
||||
buf := [maxCoreSize]byte{}
|
||||
return string(buf[:t.genCoreBytes(buf[:])])
|
||||
}
|
||||
|
||||
// MarshalText implements encoding.TextMarshaler.
|
||||
func (t Tag) MarshalText() (text []byte, err error) {
|
||||
if t.str != "" {
|
||||
text = append(text, t.str...)
|
||||
} else if t.ScriptID == 0 && t.RegionID == 0 {
|
||||
text = append(text, t.LangID.String()...)
|
||||
} else {
|
||||
buf := [maxCoreSize]byte{}
|
||||
text = buf[:t.genCoreBytes(buf[:])]
|
||||
}
|
||||
return text, nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements encoding.TextUnmarshaler.
|
||||
func (t *Tag) UnmarshalText(text []byte) error {
|
||||
tag, err := Parse(string(text))
|
||||
*t = tag
|
||||
return err
|
||||
}
|
||||
|
||||
// Variants returns the part of the tag holding all variants or the empty string
|
||||
// if there are no variants defined.
|
||||
func (t Tag) Variants() string {
|
||||
if t.pVariant == 0 {
|
||||
return ""
|
||||
}
|
||||
return t.str[t.pVariant:t.pExt]
|
||||
}
|
||||
|
||||
// VariantOrPrivateUseTags returns variants or private use tags.
|
||||
func (t Tag) VariantOrPrivateUseTags() string {
|
||||
if t.pExt > 0 {
|
||||
return t.str[t.pVariant:t.pExt]
|
||||
}
|
||||
return t.str[t.pVariant:]
|
||||
}
|
||||
|
||||
// HasString reports whether this tag defines more than just the raw
|
||||
// components.
|
||||
func (t Tag) HasString() bool {
|
||||
return t.str != ""
|
||||
}
|
||||
|
||||
// Parent returns the CLDR parent of t. In CLDR, missing fields in data for a
|
||||
// specific language are substituted with fields from the parent language.
|
||||
// The parent for a language may change for newer versions of CLDR.
|
||||
func (t Tag) Parent() Tag {
|
||||
if t.str != "" {
|
||||
// Strip the variants and extensions.
|
||||
b, s, r := t.Raw()
|
||||
t = Tag{LangID: b, ScriptID: s, RegionID: r}
|
||||
if t.RegionID == 0 && t.ScriptID != 0 && t.LangID != 0 {
|
||||
base, _ := addTags(Tag{LangID: t.LangID})
|
||||
if base.ScriptID == t.ScriptID {
|
||||
return Tag{LangID: t.LangID}
|
||||
}
|
||||
}
|
||||
return t
|
||||
}
|
||||
if t.LangID != 0 {
|
||||
if t.RegionID != 0 {
|
||||
maxScript := t.ScriptID
|
||||
if maxScript == 0 {
|
||||
max, _ := addTags(t)
|
||||
maxScript = max.ScriptID
|
||||
}
|
||||
|
||||
for i := range parents {
|
||||
if Language(parents[i].lang) == t.LangID && Script(parents[i].maxScript) == maxScript {
|
||||
for _, r := range parents[i].fromRegion {
|
||||
if Region(r) == t.RegionID {
|
||||
return Tag{
|
||||
LangID: t.LangID,
|
||||
ScriptID: Script(parents[i].script),
|
||||
RegionID: Region(parents[i].toRegion),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Strip the script if it is the default one.
|
||||
base, _ := addTags(Tag{LangID: t.LangID})
|
||||
if base.ScriptID != maxScript {
|
||||
return Tag{LangID: t.LangID, ScriptID: maxScript}
|
||||
}
|
||||
return Tag{LangID: t.LangID}
|
||||
} else if t.ScriptID != 0 {
|
||||
// The parent for an base-script pair with a non-default script is
|
||||
// "und" instead of the base language.
|
||||
base, _ := addTags(Tag{LangID: t.LangID})
|
||||
if base.ScriptID != t.ScriptID {
|
||||
return Und
|
||||
}
|
||||
return Tag{LangID: t.LangID}
|
||||
}
|
||||
}
|
||||
return Und
|
||||
}
|
||||
|
||||
// ParseExtension parses s as an extension and returns it on success.
|
||||
func ParseExtension(s string) (ext string, err error) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
ext = ""
|
||||
err = ErrSyntax
|
||||
}
|
||||
}()
|
||||
|
||||
scan := makeScannerString(s)
|
||||
var end int
|
||||
if n := len(scan.token); n != 1 {
|
||||
return "", ErrSyntax
|
||||
}
|
||||
scan.toLower(0, len(scan.b))
|
||||
end = parseExtension(&scan)
|
||||
if end != len(s) {
|
||||
return "", ErrSyntax
|
||||
}
|
||||
return string(scan.b), nil
|
||||
}
|
||||
|
||||
// HasVariants reports whether t has variants.
|
||||
func (t Tag) HasVariants() bool {
|
||||
return uint16(t.pVariant) < t.pExt
|
||||
}
|
||||
|
||||
// HasExtensions reports whether t has extensions.
|
||||
func (t Tag) HasExtensions() bool {
|
||||
return int(t.pExt) < len(t.str)
|
||||
}
|
||||
|
||||
// Extension returns the extension of type x for tag t. It will return
|
||||
// false for ok if t does not have the requested extension. The returned
|
||||
// extension will be invalid in this case.
|
||||
func (t Tag) Extension(x byte) (ext string, ok bool) {
|
||||
for i := int(t.pExt); i < len(t.str)-1; {
|
||||
var ext string
|
||||
i, ext = getExtension(t.str, i)
|
||||
if ext[0] == x {
|
||||
return ext, true
|
||||
}
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// Extensions returns all extensions of t.
|
||||
func (t Tag) Extensions() []string {
|
||||
e := []string{}
|
||||
for i := int(t.pExt); i < len(t.str)-1; {
|
||||
var ext string
|
||||
i, ext = getExtension(t.str, i)
|
||||
e = append(e, ext)
|
||||
}
|
||||
return e
|
||||
}
|
||||
|
||||
// TypeForKey returns the type associated with the given key, where key and type
|
||||
// are of the allowed values defined for the Unicode locale extension ('u') in
|
||||
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
|
||||
// TypeForKey will traverse the inheritance chain to get the correct value.
|
||||
//
|
||||
// If there are multiple types associated with a key, only the first will be
|
||||
// returned. If there is no type associated with a key, it returns the empty
|
||||
// string.
|
||||
func (t Tag) TypeForKey(key string) string {
|
||||
if _, start, end, _ := t.findTypeForKey(key); end != start {
|
||||
s := t.str[start:end]
|
||||
if p := strings.IndexByte(s, '-'); p >= 0 {
|
||||
s = s[:p]
|
||||
}
|
||||
return s
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
var (
|
||||
errPrivateUse = errors.New("cannot set a key on a private use tag")
|
||||
errInvalidArguments = errors.New("invalid key or type")
|
||||
)
|
||||
|
||||
// SetTypeForKey returns a new Tag with the key set to type, where key and type
|
||||
// are of the allowed values defined for the Unicode locale extension ('u') in
|
||||
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
|
||||
// An empty value removes an existing pair with the same key.
|
||||
func (t Tag) SetTypeForKey(key, value string) (Tag, error) {
|
||||
if t.IsPrivateUse() {
|
||||
return t, errPrivateUse
|
||||
}
|
||||
if len(key) != 2 {
|
||||
return t, errInvalidArguments
|
||||
}
|
||||
|
||||
// Remove the setting if value is "".
|
||||
if value == "" {
|
||||
start, sep, end, _ := t.findTypeForKey(key)
|
||||
if start != sep {
|
||||
// Remove a possible empty extension.
|
||||
switch {
|
||||
case t.str[start-2] != '-': // has previous elements.
|
||||
case end == len(t.str), // end of string
|
||||
end+2 < len(t.str) && t.str[end+2] == '-': // end of extension
|
||||
start -= 2
|
||||
}
|
||||
if start == int(t.pVariant) && end == len(t.str) {
|
||||
t.str = ""
|
||||
t.pVariant, t.pExt = 0, 0
|
||||
} else {
|
||||
t.str = fmt.Sprintf("%s%s", t.str[:start], t.str[end:])
|
||||
}
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
if len(value) < 3 || len(value) > 8 {
|
||||
return t, errInvalidArguments
|
||||
}
|
||||
|
||||
var (
|
||||
buf [maxCoreSize + maxSimpleUExtensionSize]byte
|
||||
uStart int // start of the -u extension.
|
||||
)
|
||||
|
||||
// Generate the tag string if needed.
|
||||
if t.str == "" {
|
||||
uStart = t.genCoreBytes(buf[:])
|
||||
buf[uStart] = '-'
|
||||
uStart++
|
||||
}
|
||||
|
||||
// Create new key-type pair and parse it to verify.
|
||||
b := buf[uStart:]
|
||||
copy(b, "u-")
|
||||
copy(b[2:], key)
|
||||
b[4] = '-'
|
||||
b = b[:5+copy(b[5:], value)]
|
||||
scan := makeScanner(b)
|
||||
if parseExtensions(&scan); scan.err != nil {
|
||||
return t, scan.err
|
||||
}
|
||||
|
||||
// Assemble the replacement string.
|
||||
if t.str == "" {
|
||||
t.pVariant, t.pExt = byte(uStart-1), uint16(uStart-1)
|
||||
t.str = string(buf[:uStart+len(b)])
|
||||
} else {
|
||||
s := t.str
|
||||
start, sep, end, hasExt := t.findTypeForKey(key)
|
||||
if start == sep {
|
||||
if hasExt {
|
||||
b = b[2:]
|
||||
}
|
||||
t.str = fmt.Sprintf("%s-%s%s", s[:sep], b, s[end:])
|
||||
} else {
|
||||
t.str = fmt.Sprintf("%s-%s%s", s[:start+3], value, s[end:])
|
||||
}
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// findTypeForKey returns the start and end position for the type corresponding
|
||||
// to key or the point at which to insert the key-value pair if the type
|
||||
// wasn't found. The hasExt return value reports whether an -u extension was present.
|
||||
// Note: the extensions are typically very small and are likely to contain
|
||||
// only one key-type pair.
|
||||
func (t Tag) findTypeForKey(key string) (start, sep, end int, hasExt bool) {
|
||||
p := int(t.pExt)
|
||||
if len(key) != 2 || p == len(t.str) || p == 0 {
|
||||
return p, p, p, false
|
||||
}
|
||||
s := t.str
|
||||
|
||||
// Find the correct extension.
|
||||
for p++; s[p] != 'u'; p++ {
|
||||
if s[p] > 'u' {
|
||||
p--
|
||||
return p, p, p, false
|
||||
}
|
||||
if p = nextExtension(s, p); p == len(s) {
|
||||
return len(s), len(s), len(s), false
|
||||
}
|
||||
}
|
||||
// Proceed to the hyphen following the extension name.
|
||||
p++
|
||||
|
||||
// curKey is the key currently being processed.
|
||||
curKey := ""
|
||||
|
||||
// Iterate over keys until we get the end of a section.
|
||||
for {
|
||||
end = p
|
||||
for p++; p < len(s) && s[p] != '-'; p++ {
|
||||
}
|
||||
n := p - end - 1
|
||||
if n <= 2 && curKey == key {
|
||||
if sep < end {
|
||||
sep++
|
||||
}
|
||||
return start, sep, end, true
|
||||
}
|
||||
switch n {
|
||||
case 0, // invalid string
|
||||
1: // next extension
|
||||
return end, end, end, true
|
||||
case 2:
|
||||
// next key
|
||||
curKey = s[end+1 : p]
|
||||
if curKey > key {
|
||||
return end, end, end, true
|
||||
}
|
||||
start = end
|
||||
sep = p
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ParseBase parses a 2- or 3-letter ISO 639 code.
|
||||
// It returns a ValueError if s is a well-formed but unknown language identifier
|
||||
// or another error if another error occurred.
|
||||
func ParseBase(s string) (l Language, err error) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
l = 0
|
||||
err = ErrSyntax
|
||||
}
|
||||
}()
|
||||
|
||||
if n := len(s); n < 2 || 3 < n {
|
||||
return 0, ErrSyntax
|
||||
}
|
||||
var buf [3]byte
|
||||
return getLangID(buf[:copy(buf[:], s)])
|
||||
}
|
||||
|
||||
// ParseScript parses a 4-letter ISO 15924 code.
|
||||
// It returns a ValueError if s is a well-formed but unknown script identifier
|
||||
// or another error if another error occurred.
|
||||
func ParseScript(s string) (scr Script, err error) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
scr = 0
|
||||
err = ErrSyntax
|
||||
}
|
||||
}()
|
||||
|
||||
if len(s) != 4 {
|
||||
return 0, ErrSyntax
|
||||
}
|
||||
var buf [4]byte
|
||||
return getScriptID(script, buf[:copy(buf[:], s)])
|
||||
}
|
||||
|
||||
// EncodeM49 returns the Region for the given UN M.49 code.
|
||||
// It returns an error if r is not a valid code.
|
||||
func EncodeM49(r int) (Region, error) {
|
||||
return getRegionM49(r)
|
||||
}
|
||||
|
||||
// ParseRegion parses a 2- or 3-letter ISO 3166-1 or a UN M.49 code.
|
||||
// It returns a ValueError if s is a well-formed but unknown region identifier
|
||||
// or another error if another error occurred.
|
||||
func ParseRegion(s string) (r Region, err error) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
r = 0
|
||||
err = ErrSyntax
|
||||
}
|
||||
}()
|
||||
|
||||
if n := len(s); n < 2 || 3 < n {
|
||||
return 0, ErrSyntax
|
||||
}
|
||||
var buf [3]byte
|
||||
return getRegionID(buf[:copy(buf[:], s)])
|
||||
}
|
||||
|
||||
// IsCountry returns whether this region is a country or autonomous area. This
|
||||
// includes non-standard definitions from CLDR.
|
||||
func (r Region) IsCountry() bool {
|
||||
if r == 0 || r.IsGroup() || r.IsPrivateUse() && r != _XK {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// IsGroup returns whether this region defines a collection of regions. This
|
||||
// includes non-standard definitions from CLDR.
|
||||
func (r Region) IsGroup() bool {
|
||||
if r == 0 {
|
||||
return false
|
||||
}
|
||||
return int(regionInclusion[r]) < len(regionContainment)
|
||||
}
|
||||
|
||||
// Contains returns whether Region c is contained by Region r. It returns true
|
||||
// if c == r.
|
||||
func (r Region) Contains(c Region) bool {
|
||||
if r == c {
|
||||
return true
|
||||
}
|
||||
g := regionInclusion[r]
|
||||
if g >= nRegionGroups {
|
||||
return false
|
||||
}
|
||||
m := regionContainment[g]
|
||||
|
||||
d := regionInclusion[c]
|
||||
b := regionInclusionBits[d]
|
||||
|
||||
// A contained country may belong to multiple disjoint groups. Matching any
|
||||
// of these indicates containment. If the contained region is a group, it
|
||||
// must strictly be a subset.
|
||||
if d >= nRegionGroups {
|
||||
return b&m != 0
|
||||
}
|
||||
return b&^m == 0
|
||||
}
|
||||
|
||||
var errNoTLD = errors.New("language: region is not a valid ccTLD")
|
||||
|
||||
// TLD returns the country code top-level domain (ccTLD). UK is returned for GB.
|
||||
// In all other cases it returns either the region itself or an error.
|
||||
//
|
||||
// This method may return an error for a region for which there exists a
|
||||
// canonical form with a ccTLD. To get that ccTLD canonicalize r first. The
|
||||
// region will already be canonicalized it was obtained from a Tag that was
|
||||
// obtained using any of the default methods.
|
||||
func (r Region) TLD() (Region, error) {
|
||||
// See http://en.wikipedia.org/wiki/Country_code_top-level_domain for the
|
||||
// difference between ISO 3166-1 and IANA ccTLD.
|
||||
if r == _GB {
|
||||
r = _UK
|
||||
}
|
||||
if (r.typ() & ccTLD) == 0 {
|
||||
return 0, errNoTLD
|
||||
}
|
||||
return r, nil
|
||||
}
|
||||
|
||||
// Canonicalize returns the region or a possible replacement if the region is
|
||||
// deprecated. It will not return a replacement for deprecated regions that
|
||||
// are split into multiple regions.
|
||||
func (r Region) Canonicalize() Region {
|
||||
if cr := normRegion(r); cr != 0 {
|
||||
return cr
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// Variant represents a registered variant of a language as defined by BCP 47.
|
||||
type Variant struct {
|
||||
ID uint8
|
||||
str string
|
||||
}
|
||||
|
||||
// ParseVariant parses and returns a Variant. An error is returned if s is not
|
||||
// a valid variant.
|
||||
func ParseVariant(s string) (v Variant, err error) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
v = Variant{}
|
||||
err = ErrSyntax
|
||||
}
|
||||
}()
|
||||
|
||||
s = strings.ToLower(s)
|
||||
if id, ok := variantIndex[s]; ok {
|
||||
return Variant{id, s}, nil
|
||||
}
|
||||
return Variant{}, NewValueError([]byte(s))
|
||||
}
|
||||
|
||||
// String returns the string representation of the variant.
|
||||
func (v Variant) String() string {
|
||||
return v.str
|
||||
}
|
||||
412
vendor/golang.org/x/text/internal/language/lookup.go
generated
vendored
Normal file
412
vendor/golang.org/x/text/internal/language/lookup.go
generated
vendored
Normal file
|
|
@ -0,0 +1,412 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package language
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strconv"
|
||||
|
||||
"golang.org/x/text/internal/tag"
|
||||
)
|
||||
|
||||
// findIndex tries to find the given tag in idx and returns a standardized error
|
||||
// if it could not be found.
|
||||
func findIndex(idx tag.Index, key []byte, form string) (index int, err error) {
|
||||
if !tag.FixCase(form, key) {
|
||||
return 0, ErrSyntax
|
||||
}
|
||||
i := idx.Index(key)
|
||||
if i == -1 {
|
||||
return 0, NewValueError(key)
|
||||
}
|
||||
return i, nil
|
||||
}
|
||||
|
||||
func searchUint(imap []uint16, key uint16) int {
|
||||
return sort.Search(len(imap), func(i int) bool {
|
||||
return imap[i] >= key
|
||||
})
|
||||
}
|
||||
|
||||
type Language uint16
|
||||
|
||||
// getLangID returns the langID of s if s is a canonical subtag
|
||||
// or langUnknown if s is not a canonical subtag.
|
||||
func getLangID(s []byte) (Language, error) {
|
||||
if len(s) == 2 {
|
||||
return getLangISO2(s)
|
||||
}
|
||||
return getLangISO3(s)
|
||||
}
|
||||
|
||||
// TODO language normalization as well as the AliasMaps could be moved to the
|
||||
// higher level package, but it is a bit tricky to separate the generation.
|
||||
|
||||
func (id Language) Canonicalize() (Language, AliasType) {
|
||||
return normLang(id)
|
||||
}
|
||||
|
||||
// normLang returns the mapped langID of id according to mapping m.
|
||||
func normLang(id Language) (Language, AliasType) {
|
||||
k := sort.Search(len(AliasMap), func(i int) bool {
|
||||
return AliasMap[i].From >= uint16(id)
|
||||
})
|
||||
if k < len(AliasMap) && AliasMap[k].From == uint16(id) {
|
||||
return Language(AliasMap[k].To), AliasTypes[k]
|
||||
}
|
||||
return id, AliasTypeUnknown
|
||||
}
|
||||
|
||||
// getLangISO2 returns the langID for the given 2-letter ISO language code
|
||||
// or unknownLang if this does not exist.
|
||||
func getLangISO2(s []byte) (Language, error) {
|
||||
if !tag.FixCase("zz", s) {
|
||||
return 0, ErrSyntax
|
||||
}
|
||||
if i := lang.Index(s); i != -1 && lang.Elem(i)[3] != 0 {
|
||||
return Language(i), nil
|
||||
}
|
||||
return 0, NewValueError(s)
|
||||
}
|
||||
|
||||
const base = 'z' - 'a' + 1
|
||||
|
||||
func strToInt(s []byte) uint {
|
||||
v := uint(0)
|
||||
for i := 0; i < len(s); i++ {
|
||||
v *= base
|
||||
v += uint(s[i] - 'a')
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// converts the given integer to the original ASCII string passed to strToInt.
|
||||
// len(s) must match the number of characters obtained.
|
||||
func intToStr(v uint, s []byte) {
|
||||
for i := len(s) - 1; i >= 0; i-- {
|
||||
s[i] = byte(v%base) + 'a'
|
||||
v /= base
|
||||
}
|
||||
}
|
||||
|
||||
// getLangISO3 returns the langID for the given 3-letter ISO language code
|
||||
// or unknownLang if this does not exist.
|
||||
func getLangISO3(s []byte) (Language, error) {
|
||||
if tag.FixCase("und", s) {
|
||||
// first try to match canonical 3-letter entries
|
||||
for i := lang.Index(s[:2]); i != -1; i = lang.Next(s[:2], i) {
|
||||
if e := lang.Elem(i); e[3] == 0 && e[2] == s[2] {
|
||||
// We treat "und" as special and always translate it to "unspecified".
|
||||
// Note that ZZ and Zzzz are private use and are not treated as
|
||||
// unspecified by default.
|
||||
id := Language(i)
|
||||
if id == nonCanonicalUnd {
|
||||
return 0, nil
|
||||
}
|
||||
return id, nil
|
||||
}
|
||||
}
|
||||
if i := altLangISO3.Index(s); i != -1 {
|
||||
return Language(altLangIndex[altLangISO3.Elem(i)[3]]), nil
|
||||
}
|
||||
n := strToInt(s)
|
||||
if langNoIndex[n/8]&(1<<(n%8)) != 0 {
|
||||
return Language(n) + langNoIndexOffset, nil
|
||||
}
|
||||
// Check for non-canonical uses of ISO3.
|
||||
for i := lang.Index(s[:1]); i != -1; i = lang.Next(s[:1], i) {
|
||||
if e := lang.Elem(i); e[2] == s[1] && e[3] == s[2] {
|
||||
return Language(i), nil
|
||||
}
|
||||
}
|
||||
return 0, NewValueError(s)
|
||||
}
|
||||
return 0, ErrSyntax
|
||||
}
|
||||
|
||||
// StringToBuf writes the string to b and returns the number of bytes
|
||||
// written. cap(b) must be >= 3.
|
||||
func (id Language) StringToBuf(b []byte) int {
|
||||
if id >= langNoIndexOffset {
|
||||
intToStr(uint(id)-langNoIndexOffset, b[:3])
|
||||
return 3
|
||||
} else if id == 0 {
|
||||
return copy(b, "und")
|
||||
}
|
||||
l := lang[id<<2:]
|
||||
if l[3] == 0 {
|
||||
return copy(b, l[:3])
|
||||
}
|
||||
return copy(b, l[:2])
|
||||
}
|
||||
|
||||
// String returns the BCP 47 representation of the langID.
|
||||
// Use b as variable name, instead of id, to ensure the variable
|
||||
// used is consistent with that of Base in which this type is embedded.
|
||||
func (b Language) String() string {
|
||||
if b == 0 {
|
||||
return "und"
|
||||
} else if b >= langNoIndexOffset {
|
||||
b -= langNoIndexOffset
|
||||
buf := [3]byte{}
|
||||
intToStr(uint(b), buf[:])
|
||||
return string(buf[:])
|
||||
}
|
||||
l := lang.Elem(int(b))
|
||||
if l[3] == 0 {
|
||||
return l[:3]
|
||||
}
|
||||
return l[:2]
|
||||
}
|
||||
|
||||
// ISO3 returns the ISO 639-3 language code.
|
||||
func (b Language) ISO3() string {
|
||||
if b == 0 || b >= langNoIndexOffset {
|
||||
return b.String()
|
||||
}
|
||||
l := lang.Elem(int(b))
|
||||
if l[3] == 0 {
|
||||
return l[:3]
|
||||
} else if l[2] == 0 {
|
||||
return altLangISO3.Elem(int(l[3]))[:3]
|
||||
}
|
||||
// This allocation will only happen for 3-letter ISO codes
|
||||
// that are non-canonical BCP 47 language identifiers.
|
||||
return l[0:1] + l[2:4]
|
||||
}
|
||||
|
||||
// IsPrivateUse reports whether this language code is reserved for private use.
|
||||
func (b Language) IsPrivateUse() bool {
|
||||
return langPrivateStart <= b && b <= langPrivateEnd
|
||||
}
|
||||
|
||||
// SuppressScript returns the script marked as SuppressScript in the IANA
|
||||
// language tag repository, or 0 if there is no such script.
|
||||
func (b Language) SuppressScript() Script {
|
||||
if b < langNoIndexOffset {
|
||||
return Script(suppressScript[b])
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
type Region uint16
|
||||
|
||||
// getRegionID returns the region id for s if s is a valid 2-letter region code
|
||||
// or unknownRegion.
|
||||
func getRegionID(s []byte) (Region, error) {
|
||||
if len(s) == 3 {
|
||||
if isAlpha(s[0]) {
|
||||
return getRegionISO3(s)
|
||||
}
|
||||
if i, err := strconv.ParseUint(string(s), 10, 10); err == nil {
|
||||
return getRegionM49(int(i))
|
||||
}
|
||||
}
|
||||
return getRegionISO2(s)
|
||||
}
|
||||
|
||||
// getRegionISO2 returns the regionID for the given 2-letter ISO country code
|
||||
// or unknownRegion if this does not exist.
|
||||
func getRegionISO2(s []byte) (Region, error) {
|
||||
i, err := findIndex(regionISO, s, "ZZ")
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return Region(i) + isoRegionOffset, nil
|
||||
}
|
||||
|
||||
// getRegionISO3 returns the regionID for the given 3-letter ISO country code
|
||||
// or unknownRegion if this does not exist.
|
||||
func getRegionISO3(s []byte) (Region, error) {
|
||||
if tag.FixCase("ZZZ", s) {
|
||||
for i := regionISO.Index(s[:1]); i != -1; i = regionISO.Next(s[:1], i) {
|
||||
if e := regionISO.Elem(i); e[2] == s[1] && e[3] == s[2] {
|
||||
return Region(i) + isoRegionOffset, nil
|
||||
}
|
||||
}
|
||||
for i := 0; i < len(altRegionISO3); i += 3 {
|
||||
if tag.Compare(altRegionISO3[i:i+3], s) == 0 {
|
||||
return Region(altRegionIDs[i/3]), nil
|
||||
}
|
||||
}
|
||||
return 0, NewValueError(s)
|
||||
}
|
||||
return 0, ErrSyntax
|
||||
}
|
||||
|
||||
func getRegionM49(n int) (Region, error) {
|
||||
if 0 < n && n <= 999 {
|
||||
const (
|
||||
searchBits = 7
|
||||
regionBits = 9
|
||||
regionMask = 1<<regionBits - 1
|
||||
)
|
||||
idx := n >> searchBits
|
||||
buf := fromM49[m49Index[idx]:m49Index[idx+1]]
|
||||
val := uint16(n) << regionBits // we rely on bits shifting out
|
||||
i := sort.Search(len(buf), func(i int) bool {
|
||||
return buf[i] >= val
|
||||
})
|
||||
if r := fromM49[int(m49Index[idx])+i]; r&^regionMask == val {
|
||||
return Region(r & regionMask), nil
|
||||
}
|
||||
}
|
||||
var e ValueError
|
||||
fmt.Fprint(bytes.NewBuffer([]byte(e.v[:])), n)
|
||||
return 0, e
|
||||
}
|
||||
|
||||
// normRegion returns a region if r is deprecated or 0 otherwise.
|
||||
// TODO: consider supporting BYS (-> BLR), CSK (-> 200 or CZ), PHI (-> PHL) and AFI (-> DJ).
|
||||
// TODO: consider mapping split up regions to new most populous one (like CLDR).
|
||||
func normRegion(r Region) Region {
|
||||
m := regionOldMap
|
||||
k := sort.Search(len(m), func(i int) bool {
|
||||
return m[i].From >= uint16(r)
|
||||
})
|
||||
if k < len(m) && m[k].From == uint16(r) {
|
||||
return Region(m[k].To)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
const (
|
||||
iso3166UserAssigned = 1 << iota
|
||||
ccTLD
|
||||
bcp47Region
|
||||
)
|
||||
|
||||
func (r Region) typ() byte {
|
||||
return regionTypes[r]
|
||||
}
|
||||
|
||||
// String returns the BCP 47 representation for the region.
|
||||
// It returns "ZZ" for an unspecified region.
|
||||
func (r Region) String() string {
|
||||
if r < isoRegionOffset {
|
||||
if r == 0 {
|
||||
return "ZZ"
|
||||
}
|
||||
return fmt.Sprintf("%03d", r.M49())
|
||||
}
|
||||
r -= isoRegionOffset
|
||||
return regionISO.Elem(int(r))[:2]
|
||||
}
|
||||
|
||||
// ISO3 returns the 3-letter ISO code of r.
|
||||
// Note that not all regions have a 3-letter ISO code.
|
||||
// In such cases this method returns "ZZZ".
|
||||
func (r Region) ISO3() string {
|
||||
if r < isoRegionOffset {
|
||||
return "ZZZ"
|
||||
}
|
||||
r -= isoRegionOffset
|
||||
reg := regionISO.Elem(int(r))
|
||||
switch reg[2] {
|
||||
case 0:
|
||||
return altRegionISO3[reg[3]:][:3]
|
||||
case ' ':
|
||||
return "ZZZ"
|
||||
}
|
||||
return reg[0:1] + reg[2:4]
|
||||
}
|
||||
|
||||
// M49 returns the UN M.49 encoding of r, or 0 if this encoding
|
||||
// is not defined for r.
|
||||
func (r Region) M49() int {
|
||||
return int(m49[r])
|
||||
}
|
||||
|
||||
// IsPrivateUse reports whether r has the ISO 3166 User-assigned status. This
|
||||
// may include private-use tags that are assigned by CLDR and used in this
|
||||
// implementation. So IsPrivateUse and IsCountry can be simultaneously true.
|
||||
func (r Region) IsPrivateUse() bool {
|
||||
return r.typ()&iso3166UserAssigned != 0
|
||||
}
|
||||
|
||||
type Script uint16
|
||||
|
||||
// getScriptID returns the script id for string s. It assumes that s
|
||||
// is of the format [A-Z][a-z]{3}.
|
||||
func getScriptID(idx tag.Index, s []byte) (Script, error) {
|
||||
i, err := findIndex(idx, s, "Zzzz")
|
||||
return Script(i), err
|
||||
}
|
||||
|
||||
// String returns the script code in title case.
|
||||
// It returns "Zzzz" for an unspecified script.
|
||||
func (s Script) String() string {
|
||||
if s == 0 {
|
||||
return "Zzzz"
|
||||
}
|
||||
return script.Elem(int(s))
|
||||
}
|
||||
|
||||
// IsPrivateUse reports whether this script code is reserved for private use.
|
||||
func (s Script) IsPrivateUse() bool {
|
||||
return _Qaaa <= s && s <= _Qabx
|
||||
}
|
||||
|
||||
const (
|
||||
maxAltTaglen = len("en-US-POSIX")
|
||||
maxLen = maxAltTaglen
|
||||
)
|
||||
|
||||
var (
|
||||
// grandfatheredMap holds a mapping from legacy and grandfathered tags to
|
||||
// their base language or index to more elaborate tag.
|
||||
grandfatheredMap = map[[maxLen]byte]int16{
|
||||
[maxLen]byte{'a', 'r', 't', '-', 'l', 'o', 'j', 'b', 'a', 'n'}: _jbo, // art-lojban
|
||||
[maxLen]byte{'i', '-', 'a', 'm', 'i'}: _ami, // i-ami
|
||||
[maxLen]byte{'i', '-', 'b', 'n', 'n'}: _bnn, // i-bnn
|
||||
[maxLen]byte{'i', '-', 'h', 'a', 'k'}: _hak, // i-hak
|
||||
[maxLen]byte{'i', '-', 'k', 'l', 'i', 'n', 'g', 'o', 'n'}: _tlh, // i-klingon
|
||||
[maxLen]byte{'i', '-', 'l', 'u', 'x'}: _lb, // i-lux
|
||||
[maxLen]byte{'i', '-', 'n', 'a', 'v', 'a', 'j', 'o'}: _nv, // i-navajo
|
||||
[maxLen]byte{'i', '-', 'p', 'w', 'n'}: _pwn, // i-pwn
|
||||
[maxLen]byte{'i', '-', 't', 'a', 'o'}: _tao, // i-tao
|
||||
[maxLen]byte{'i', '-', 't', 'a', 'y'}: _tay, // i-tay
|
||||
[maxLen]byte{'i', '-', 't', 's', 'u'}: _tsu, // i-tsu
|
||||
[maxLen]byte{'n', 'o', '-', 'b', 'o', 'k'}: _nb, // no-bok
|
||||
[maxLen]byte{'n', 'o', '-', 'n', 'y', 'n'}: _nn, // no-nyn
|
||||
[maxLen]byte{'s', 'g', 'n', '-', 'b', 'e', '-', 'f', 'r'}: _sfb, // sgn-BE-FR
|
||||
[maxLen]byte{'s', 'g', 'n', '-', 'b', 'e', '-', 'n', 'l'}: _vgt, // sgn-BE-NL
|
||||
[maxLen]byte{'s', 'g', 'n', '-', 'c', 'h', '-', 'd', 'e'}: _sgg, // sgn-CH-DE
|
||||
[maxLen]byte{'z', 'h', '-', 'g', 'u', 'o', 'y', 'u'}: _cmn, // zh-guoyu
|
||||
[maxLen]byte{'z', 'h', '-', 'h', 'a', 'k', 'k', 'a'}: _hak, // zh-hakka
|
||||
[maxLen]byte{'z', 'h', '-', 'm', 'i', 'n', '-', 'n', 'a', 'n'}: _nan, // zh-min-nan
|
||||
[maxLen]byte{'z', 'h', '-', 'x', 'i', 'a', 'n', 'g'}: _hsn, // zh-xiang
|
||||
|
||||
// Grandfathered tags with no modern replacement will be converted as
|
||||
// follows:
|
||||
[maxLen]byte{'c', 'e', 'l', '-', 'g', 'a', 'u', 'l', 'i', 's', 'h'}: -1, // cel-gaulish
|
||||
[maxLen]byte{'e', 'n', '-', 'g', 'b', '-', 'o', 'e', 'd'}: -2, // en-GB-oed
|
||||
[maxLen]byte{'i', '-', 'd', 'e', 'f', 'a', 'u', 'l', 't'}: -3, // i-default
|
||||
[maxLen]byte{'i', '-', 'e', 'n', 'o', 'c', 'h', 'i', 'a', 'n'}: -4, // i-enochian
|
||||
[maxLen]byte{'i', '-', 'm', 'i', 'n', 'g', 'o'}: -5, // i-mingo
|
||||
[maxLen]byte{'z', 'h', '-', 'm', 'i', 'n'}: -6, // zh-min
|
||||
|
||||
// CLDR-specific tag.
|
||||
[maxLen]byte{'r', 'o', 'o', 't'}: 0, // root
|
||||
[maxLen]byte{'e', 'n', '-', 'u', 's', '-', 'p', 'o', 's', 'i', 'x'}: -7, // en_US_POSIX"
|
||||
}
|
||||
|
||||
altTagIndex = [...]uint8{0, 17, 31, 45, 61, 74, 86, 102}
|
||||
|
||||
altTags = "xtg-x-cel-gaulishen-GB-oxendicten-x-i-defaultund-x-i-enochiansee-x-i-mingonan-x-zh-minen-US-u-va-posix"
|
||||
)
|
||||
|
||||
func grandfathered(s [maxAltTaglen]byte) (t Tag, ok bool) {
|
||||
if v, ok := grandfatheredMap[s]; ok {
|
||||
if v < 0 {
|
||||
return Make(altTags[altTagIndex[-v-1]:altTagIndex[-v]]), true
|
||||
}
|
||||
t.LangID = Language(v)
|
||||
return t, true
|
||||
}
|
||||
return t, false
|
||||
}
|
||||
226
vendor/golang.org/x/text/internal/language/match.go
generated
vendored
Normal file
226
vendor/golang.org/x/text/internal/language/match.go
generated
vendored
Normal file
|
|
@ -0,0 +1,226 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package language
|
||||
|
||||
import "errors"
|
||||
|
||||
type scriptRegionFlags uint8
|
||||
|
||||
const (
|
||||
isList = 1 << iota
|
||||
scriptInFrom
|
||||
regionInFrom
|
||||
)
|
||||
|
||||
func (t *Tag) setUndefinedLang(id Language) {
|
||||
if t.LangID == 0 {
|
||||
t.LangID = id
|
||||
}
|
||||
}
|
||||
|
||||
func (t *Tag) setUndefinedScript(id Script) {
|
||||
if t.ScriptID == 0 {
|
||||
t.ScriptID = id
|
||||
}
|
||||
}
|
||||
|
||||
func (t *Tag) setUndefinedRegion(id Region) {
|
||||
if t.RegionID == 0 || t.RegionID.Contains(id) {
|
||||
t.RegionID = id
|
||||
}
|
||||
}
|
||||
|
||||
// ErrMissingLikelyTagsData indicates no information was available
|
||||
// to compute likely values of missing tags.
|
||||
var ErrMissingLikelyTagsData = errors.New("missing likely tags data")
|
||||
|
||||
// addLikelySubtags sets subtags to their most likely value, given the locale.
|
||||
// In most cases this means setting fields for unknown values, but in some
|
||||
// cases it may alter a value. It returns an ErrMissingLikelyTagsData error
|
||||
// if the given locale cannot be expanded.
|
||||
func (t Tag) addLikelySubtags() (Tag, error) {
|
||||
id, err := addTags(t)
|
||||
if err != nil {
|
||||
return t, err
|
||||
} else if id.equalTags(t) {
|
||||
return t, nil
|
||||
}
|
||||
id.RemakeString()
|
||||
return id, nil
|
||||
}
|
||||
|
||||
// specializeRegion attempts to specialize a group region.
|
||||
func specializeRegion(t *Tag) bool {
|
||||
if i := regionInclusion[t.RegionID]; i < nRegionGroups {
|
||||
x := likelyRegionGroup[i]
|
||||
if Language(x.lang) == t.LangID && Script(x.script) == t.ScriptID {
|
||||
t.RegionID = Region(x.region)
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Maximize returns a new tag with missing tags filled in.
|
||||
func (t Tag) Maximize() (Tag, error) {
|
||||
return addTags(t)
|
||||
}
|
||||
|
||||
func addTags(t Tag) (Tag, error) {
|
||||
// We leave private use identifiers alone.
|
||||
if t.IsPrivateUse() {
|
||||
return t, nil
|
||||
}
|
||||
if t.ScriptID != 0 && t.RegionID != 0 {
|
||||
if t.LangID != 0 {
|
||||
// already fully specified
|
||||
specializeRegion(&t)
|
||||
return t, nil
|
||||
}
|
||||
// Search matches for und-script-region. Note that for these cases
|
||||
// region will never be a group so there is no need to check for this.
|
||||
list := likelyRegion[t.RegionID : t.RegionID+1]
|
||||
if x := list[0]; x.flags&isList != 0 {
|
||||
list = likelyRegionList[x.lang : x.lang+uint16(x.script)]
|
||||
}
|
||||
for _, x := range list {
|
||||
// Deviating from the spec. See match_test.go for details.
|
||||
if Script(x.script) == t.ScriptID {
|
||||
t.setUndefinedLang(Language(x.lang))
|
||||
return t, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
if t.LangID != 0 {
|
||||
// Search matches for lang-script and lang-region, where lang != und.
|
||||
if t.LangID < langNoIndexOffset {
|
||||
x := likelyLang[t.LangID]
|
||||
if x.flags&isList != 0 {
|
||||
list := likelyLangList[x.region : x.region+uint16(x.script)]
|
||||
if t.ScriptID != 0 {
|
||||
for _, x := range list {
|
||||
if Script(x.script) == t.ScriptID && x.flags&scriptInFrom != 0 {
|
||||
t.setUndefinedRegion(Region(x.region))
|
||||
return t, nil
|
||||
}
|
||||
}
|
||||
} else if t.RegionID != 0 {
|
||||
count := 0
|
||||
goodScript := true
|
||||
tt := t
|
||||
for _, x := range list {
|
||||
// We visit all entries for which the script was not
|
||||
// defined, including the ones where the region was not
|
||||
// defined. This allows for proper disambiguation within
|
||||
// regions.
|
||||
if x.flags&scriptInFrom == 0 && t.RegionID.Contains(Region(x.region)) {
|
||||
tt.RegionID = Region(x.region)
|
||||
tt.setUndefinedScript(Script(x.script))
|
||||
goodScript = goodScript && tt.ScriptID == Script(x.script)
|
||||
count++
|
||||
}
|
||||
}
|
||||
if count == 1 {
|
||||
return tt, nil
|
||||
}
|
||||
// Even if we fail to find a unique Region, we might have
|
||||
// an unambiguous script.
|
||||
if goodScript {
|
||||
t.ScriptID = tt.ScriptID
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Search matches for und-script.
|
||||
if t.ScriptID != 0 {
|
||||
x := likelyScript[t.ScriptID]
|
||||
if x.region != 0 {
|
||||
t.setUndefinedRegion(Region(x.region))
|
||||
t.setUndefinedLang(Language(x.lang))
|
||||
return t, nil
|
||||
}
|
||||
}
|
||||
// Search matches for und-region. If und-script-region exists, it would
|
||||
// have been found earlier.
|
||||
if t.RegionID != 0 {
|
||||
if i := regionInclusion[t.RegionID]; i < nRegionGroups {
|
||||
x := likelyRegionGroup[i]
|
||||
if x.region != 0 {
|
||||
t.setUndefinedLang(Language(x.lang))
|
||||
t.setUndefinedScript(Script(x.script))
|
||||
t.RegionID = Region(x.region)
|
||||
}
|
||||
} else {
|
||||
x := likelyRegion[t.RegionID]
|
||||
if x.flags&isList != 0 {
|
||||
x = likelyRegionList[x.lang]
|
||||
}
|
||||
if x.script != 0 && x.flags != scriptInFrom {
|
||||
t.setUndefinedLang(Language(x.lang))
|
||||
t.setUndefinedScript(Script(x.script))
|
||||
return t, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Search matches for lang.
|
||||
if t.LangID < langNoIndexOffset {
|
||||
x := likelyLang[t.LangID]
|
||||
if x.flags&isList != 0 {
|
||||
x = likelyLangList[x.region]
|
||||
}
|
||||
if x.region != 0 {
|
||||
t.setUndefinedScript(Script(x.script))
|
||||
t.setUndefinedRegion(Region(x.region))
|
||||
}
|
||||
specializeRegion(&t)
|
||||
if t.LangID == 0 {
|
||||
t.LangID = _en // default language
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
return t, ErrMissingLikelyTagsData
|
||||
}
|
||||
|
||||
func (t *Tag) setTagsFrom(id Tag) {
|
||||
t.LangID = id.LangID
|
||||
t.ScriptID = id.ScriptID
|
||||
t.RegionID = id.RegionID
|
||||
}
|
||||
|
||||
// minimize removes the region or script subtags from t such that
|
||||
// t.addLikelySubtags() == t.minimize().addLikelySubtags().
|
||||
func (t Tag) minimize() (Tag, error) {
|
||||
t, err := minimizeTags(t)
|
||||
if err != nil {
|
||||
return t, err
|
||||
}
|
||||
t.RemakeString()
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// minimizeTags mimics the behavior of the ICU 51 C implementation.
|
||||
func minimizeTags(t Tag) (Tag, error) {
|
||||
if t.equalTags(Und) {
|
||||
return t, nil
|
||||
}
|
||||
max, err := addTags(t)
|
||||
if err != nil {
|
||||
return t, err
|
||||
}
|
||||
for _, id := range [...]Tag{
|
||||
{LangID: t.LangID},
|
||||
{LangID: t.LangID, RegionID: t.RegionID},
|
||||
{LangID: t.LangID, ScriptID: t.ScriptID},
|
||||
} {
|
||||
if x, err := addTags(id); err == nil && max.equalTags(x) {
|
||||
t.setTagsFrom(id)
|
||||
break
|
||||
}
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
608
vendor/golang.org/x/text/internal/language/parse.go
generated
vendored
Normal file
608
vendor/golang.org/x/text/internal/language/parse.go
generated
vendored
Normal file
|
|
@ -0,0 +1,608 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package language
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"sort"
|
||||
|
||||
"golang.org/x/text/internal/tag"
|
||||
)
|
||||
|
||||
// isAlpha returns true if the byte is not a digit.
|
||||
// b must be an ASCII letter or digit.
|
||||
func isAlpha(b byte) bool {
|
||||
return b > '9'
|
||||
}
|
||||
|
||||
// isAlphaNum returns true if the string contains only ASCII letters or digits.
|
||||
func isAlphaNum(s []byte) bool {
|
||||
for _, c := range s {
|
||||
if !('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z' || '0' <= c && c <= '9') {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// ErrSyntax is returned by any of the parsing functions when the
|
||||
// input is not well-formed, according to BCP 47.
|
||||
// TODO: return the position at which the syntax error occurred?
|
||||
var ErrSyntax = errors.New("language: tag is not well-formed")
|
||||
|
||||
// ErrDuplicateKey is returned when a tag contains the same key twice with
|
||||
// different values in the -u section.
|
||||
var ErrDuplicateKey = errors.New("language: different values for same key in -u extension")
|
||||
|
||||
// ValueError is returned by any of the parsing functions when the
|
||||
// input is well-formed but the respective subtag is not recognized
|
||||
// as a valid value.
|
||||
type ValueError struct {
|
||||
v [8]byte
|
||||
}
|
||||
|
||||
// NewValueError creates a new ValueError.
|
||||
func NewValueError(tag []byte) ValueError {
|
||||
var e ValueError
|
||||
copy(e.v[:], tag)
|
||||
return e
|
||||
}
|
||||
|
||||
func (e ValueError) tag() []byte {
|
||||
n := bytes.IndexByte(e.v[:], 0)
|
||||
if n == -1 {
|
||||
n = 8
|
||||
}
|
||||
return e.v[:n]
|
||||
}
|
||||
|
||||
// Error implements the error interface.
|
||||
func (e ValueError) Error() string {
|
||||
return fmt.Sprintf("language: subtag %q is well-formed but unknown", e.tag())
|
||||
}
|
||||
|
||||
// Subtag returns the subtag for which the error occurred.
|
||||
func (e ValueError) Subtag() string {
|
||||
return string(e.tag())
|
||||
}
|
||||
|
||||
// scanner is used to scan BCP 47 tokens, which are separated by _ or -.
|
||||
type scanner struct {
|
||||
b []byte
|
||||
bytes [max99thPercentileSize]byte
|
||||
token []byte
|
||||
start int // start position of the current token
|
||||
end int // end position of the current token
|
||||
next int // next point for scan
|
||||
err error
|
||||
done bool
|
||||
}
|
||||
|
||||
func makeScannerString(s string) scanner {
|
||||
scan := scanner{}
|
||||
if len(s) <= len(scan.bytes) {
|
||||
scan.b = scan.bytes[:copy(scan.bytes[:], s)]
|
||||
} else {
|
||||
scan.b = []byte(s)
|
||||
}
|
||||
scan.init()
|
||||
return scan
|
||||
}
|
||||
|
||||
// makeScanner returns a scanner using b as the input buffer.
|
||||
// b is not copied and may be modified by the scanner routines.
|
||||
func makeScanner(b []byte) scanner {
|
||||
scan := scanner{b: b}
|
||||
scan.init()
|
||||
return scan
|
||||
}
|
||||
|
||||
func (s *scanner) init() {
|
||||
for i, c := range s.b {
|
||||
if c == '_' {
|
||||
s.b[i] = '-'
|
||||
}
|
||||
}
|
||||
s.scan()
|
||||
}
|
||||
|
||||
// restToLower converts the string between start and end to lower case.
|
||||
func (s *scanner) toLower(start, end int) {
|
||||
for i := start; i < end; i++ {
|
||||
c := s.b[i]
|
||||
if 'A' <= c && c <= 'Z' {
|
||||
s.b[i] += 'a' - 'A'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *scanner) setError(e error) {
|
||||
if s.err == nil || (e == ErrSyntax && s.err != ErrSyntax) {
|
||||
s.err = e
|
||||
}
|
||||
}
|
||||
|
||||
// resizeRange shrinks or grows the array at position oldStart such that
|
||||
// a new string of size newSize can fit between oldStart and oldEnd.
|
||||
// Sets the scan point to after the resized range.
|
||||
func (s *scanner) resizeRange(oldStart, oldEnd, newSize int) {
|
||||
s.start = oldStart
|
||||
if end := oldStart + newSize; end != oldEnd {
|
||||
diff := end - oldEnd
|
||||
var b []byte
|
||||
if n := len(s.b) + diff; n > cap(s.b) {
|
||||
b = make([]byte, n)
|
||||
copy(b, s.b[:oldStart])
|
||||
} else {
|
||||
b = s.b[:n]
|
||||
}
|
||||
copy(b[end:], s.b[oldEnd:])
|
||||
s.b = b
|
||||
s.next = end + (s.next - s.end)
|
||||
s.end = end
|
||||
}
|
||||
}
|
||||
|
||||
// replace replaces the current token with repl.
|
||||
func (s *scanner) replace(repl string) {
|
||||
s.resizeRange(s.start, s.end, len(repl))
|
||||
copy(s.b[s.start:], repl)
|
||||
}
|
||||
|
||||
// gobble removes the current token from the input.
|
||||
// Caller must call scan after calling gobble.
|
||||
func (s *scanner) gobble(e error) {
|
||||
s.setError(e)
|
||||
if s.start == 0 {
|
||||
s.b = s.b[:+copy(s.b, s.b[s.next:])]
|
||||
s.end = 0
|
||||
} else {
|
||||
s.b = s.b[:s.start-1+copy(s.b[s.start-1:], s.b[s.end:])]
|
||||
s.end = s.start - 1
|
||||
}
|
||||
s.next = s.start
|
||||
}
|
||||
|
||||
// deleteRange removes the given range from s.b before the current token.
|
||||
func (s *scanner) deleteRange(start, end int) {
|
||||
s.b = s.b[:start+copy(s.b[start:], s.b[end:])]
|
||||
diff := end - start
|
||||
s.next -= diff
|
||||
s.start -= diff
|
||||
s.end -= diff
|
||||
}
|
||||
|
||||
// scan parses the next token of a BCP 47 string. Tokens that are larger
|
||||
// than 8 characters or include non-alphanumeric characters result in an error
|
||||
// and are gobbled and removed from the output.
|
||||
// It returns the end position of the last token consumed.
|
||||
func (s *scanner) scan() (end int) {
|
||||
end = s.end
|
||||
s.token = nil
|
||||
for s.start = s.next; s.next < len(s.b); {
|
||||
i := bytes.IndexByte(s.b[s.next:], '-')
|
||||
if i == -1 {
|
||||
s.end = len(s.b)
|
||||
s.next = len(s.b)
|
||||
i = s.end - s.start
|
||||
} else {
|
||||
s.end = s.next + i
|
||||
s.next = s.end + 1
|
||||
}
|
||||
token := s.b[s.start:s.end]
|
||||
if i < 1 || i > 8 || !isAlphaNum(token) {
|
||||
s.gobble(ErrSyntax)
|
||||
continue
|
||||
}
|
||||
s.token = token
|
||||
return end
|
||||
}
|
||||
if n := len(s.b); n > 0 && s.b[n-1] == '-' {
|
||||
s.setError(ErrSyntax)
|
||||
s.b = s.b[:len(s.b)-1]
|
||||
}
|
||||
s.done = true
|
||||
return end
|
||||
}
|
||||
|
||||
// acceptMinSize parses multiple tokens of the given size or greater.
|
||||
// It returns the end position of the last token consumed.
|
||||
func (s *scanner) acceptMinSize(min int) (end int) {
|
||||
end = s.end
|
||||
s.scan()
|
||||
for ; len(s.token) >= min; s.scan() {
|
||||
end = s.end
|
||||
}
|
||||
return end
|
||||
}
|
||||
|
||||
// Parse parses the given BCP 47 string and returns a valid Tag. If parsing
|
||||
// failed it returns an error and any part of the tag that could be parsed.
|
||||
// If parsing succeeded but an unknown value was found, it returns
|
||||
// ValueError. The Tag returned in this case is just stripped of the unknown
|
||||
// value. All other values are preserved. It accepts tags in the BCP 47 format
|
||||
// and extensions to this standard defined in
|
||||
// https://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers.
|
||||
func Parse(s string) (t Tag, err error) {
|
||||
// TODO: consider supporting old-style locale key-value pairs.
|
||||
if s == "" {
|
||||
return Und, ErrSyntax
|
||||
}
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
t = Und
|
||||
err = ErrSyntax
|
||||
return
|
||||
}
|
||||
}()
|
||||
if len(s) <= maxAltTaglen {
|
||||
b := [maxAltTaglen]byte{}
|
||||
for i, c := range s {
|
||||
// Generating invalid UTF-8 is okay as it won't match.
|
||||
if 'A' <= c && c <= 'Z' {
|
||||
c += 'a' - 'A'
|
||||
} else if c == '_' {
|
||||
c = '-'
|
||||
}
|
||||
b[i] = byte(c)
|
||||
}
|
||||
if t, ok := grandfathered(b); ok {
|
||||
return t, nil
|
||||
}
|
||||
}
|
||||
scan := makeScannerString(s)
|
||||
return parse(&scan, s)
|
||||
}
|
||||
|
||||
func parse(scan *scanner, s string) (t Tag, err error) {
|
||||
t = Und
|
||||
var end int
|
||||
if n := len(scan.token); n <= 1 {
|
||||
scan.toLower(0, len(scan.b))
|
||||
if n == 0 || scan.token[0] != 'x' {
|
||||
return t, ErrSyntax
|
||||
}
|
||||
end = parseExtensions(scan)
|
||||
} else if n >= 4 {
|
||||
return Und, ErrSyntax
|
||||
} else { // the usual case
|
||||
t, end = parseTag(scan, true)
|
||||
if n := len(scan.token); n == 1 {
|
||||
t.pExt = uint16(end)
|
||||
end = parseExtensions(scan)
|
||||
} else if end < len(scan.b) {
|
||||
scan.setError(ErrSyntax)
|
||||
scan.b = scan.b[:end]
|
||||
}
|
||||
}
|
||||
if int(t.pVariant) < len(scan.b) {
|
||||
if end < len(s) {
|
||||
s = s[:end]
|
||||
}
|
||||
if len(s) > 0 && tag.Compare(s, scan.b) == 0 {
|
||||
t.str = s
|
||||
} else {
|
||||
t.str = string(scan.b)
|
||||
}
|
||||
} else {
|
||||
t.pVariant, t.pExt = 0, 0
|
||||
}
|
||||
return t, scan.err
|
||||
}
|
||||
|
||||
// parseTag parses language, script, region and variants.
|
||||
// It returns a Tag and the end position in the input that was parsed.
|
||||
// If doNorm is true, then <lang>-<extlang> will be normalized to <extlang>.
|
||||
func parseTag(scan *scanner, doNorm bool) (t Tag, end int) {
|
||||
var e error
|
||||
// TODO: set an error if an unknown lang, script or region is encountered.
|
||||
t.LangID, e = getLangID(scan.token)
|
||||
scan.setError(e)
|
||||
scan.replace(t.LangID.String())
|
||||
langStart := scan.start
|
||||
end = scan.scan()
|
||||
for len(scan.token) == 3 && isAlpha(scan.token[0]) {
|
||||
// From http://tools.ietf.org/html/bcp47, <lang>-<extlang> tags are equivalent
|
||||
// to a tag of the form <extlang>.
|
||||
if doNorm {
|
||||
lang, e := getLangID(scan.token)
|
||||
if lang != 0 {
|
||||
t.LangID = lang
|
||||
langStr := lang.String()
|
||||
copy(scan.b[langStart:], langStr)
|
||||
scan.b[langStart+len(langStr)] = '-'
|
||||
scan.start = langStart + len(langStr) + 1
|
||||
}
|
||||
scan.gobble(e)
|
||||
}
|
||||
end = scan.scan()
|
||||
}
|
||||
if len(scan.token) == 4 && isAlpha(scan.token[0]) {
|
||||
t.ScriptID, e = getScriptID(script, scan.token)
|
||||
if t.ScriptID == 0 {
|
||||
scan.gobble(e)
|
||||
}
|
||||
end = scan.scan()
|
||||
}
|
||||
if n := len(scan.token); n >= 2 && n <= 3 {
|
||||
t.RegionID, e = getRegionID(scan.token)
|
||||
if t.RegionID == 0 {
|
||||
scan.gobble(e)
|
||||
} else {
|
||||
scan.replace(t.RegionID.String())
|
||||
}
|
||||
end = scan.scan()
|
||||
}
|
||||
scan.toLower(scan.start, len(scan.b))
|
||||
t.pVariant = byte(end)
|
||||
end = parseVariants(scan, end, t)
|
||||
t.pExt = uint16(end)
|
||||
return t, end
|
||||
}
|
||||
|
||||
var separator = []byte{'-'}
|
||||
|
||||
// parseVariants scans tokens as long as each token is a valid variant string.
|
||||
// Duplicate variants are removed.
|
||||
func parseVariants(scan *scanner, end int, t Tag) int {
|
||||
start := scan.start
|
||||
varIDBuf := [4]uint8{}
|
||||
variantBuf := [4][]byte{}
|
||||
varID := varIDBuf[:0]
|
||||
variant := variantBuf[:0]
|
||||
last := -1
|
||||
needSort := false
|
||||
for ; len(scan.token) >= 4; scan.scan() {
|
||||
// TODO: measure the impact of needing this conversion and redesign
|
||||
// the data structure if there is an issue.
|
||||
v, ok := variantIndex[string(scan.token)]
|
||||
if !ok {
|
||||
// unknown variant
|
||||
// TODO: allow user-defined variants?
|
||||
scan.gobble(NewValueError(scan.token))
|
||||
continue
|
||||
}
|
||||
varID = append(varID, v)
|
||||
variant = append(variant, scan.token)
|
||||
if !needSort {
|
||||
if last < int(v) {
|
||||
last = int(v)
|
||||
} else {
|
||||
needSort = true
|
||||
// There is no legal combinations of more than 7 variants
|
||||
// (and this is by no means a useful sequence).
|
||||
const maxVariants = 8
|
||||
if len(varID) > maxVariants {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
end = scan.end
|
||||
}
|
||||
if needSort {
|
||||
sort.Sort(variantsSort{varID, variant})
|
||||
k, l := 0, -1
|
||||
for i, v := range varID {
|
||||
w := int(v)
|
||||
if l == w {
|
||||
// Remove duplicates.
|
||||
continue
|
||||
}
|
||||
varID[k] = varID[i]
|
||||
variant[k] = variant[i]
|
||||
k++
|
||||
l = w
|
||||
}
|
||||
if str := bytes.Join(variant[:k], separator); len(str) == 0 {
|
||||
end = start - 1
|
||||
} else {
|
||||
scan.resizeRange(start, end, len(str))
|
||||
copy(scan.b[scan.start:], str)
|
||||
end = scan.end
|
||||
}
|
||||
}
|
||||
return end
|
||||
}
|
||||
|
||||
type variantsSort struct {
|
||||
i []uint8
|
||||
v [][]byte
|
||||
}
|
||||
|
||||
func (s variantsSort) Len() int {
|
||||
return len(s.i)
|
||||
}
|
||||
|
||||
func (s variantsSort) Swap(i, j int) {
|
||||
s.i[i], s.i[j] = s.i[j], s.i[i]
|
||||
s.v[i], s.v[j] = s.v[j], s.v[i]
|
||||
}
|
||||
|
||||
func (s variantsSort) Less(i, j int) bool {
|
||||
return s.i[i] < s.i[j]
|
||||
}
|
||||
|
||||
type bytesSort struct {
|
||||
b [][]byte
|
||||
n int // first n bytes to compare
|
||||
}
|
||||
|
||||
func (b bytesSort) Len() int {
|
||||
return len(b.b)
|
||||
}
|
||||
|
||||
func (b bytesSort) Swap(i, j int) {
|
||||
b.b[i], b.b[j] = b.b[j], b.b[i]
|
||||
}
|
||||
|
||||
func (b bytesSort) Less(i, j int) bool {
|
||||
for k := 0; k < b.n; k++ {
|
||||
if b.b[i][k] == b.b[j][k] {
|
||||
continue
|
||||
}
|
||||
return b.b[i][k] < b.b[j][k]
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// parseExtensions parses and normalizes the extensions in the buffer.
|
||||
// It returns the last position of scan.b that is part of any extension.
|
||||
// It also trims scan.b to remove excess parts accordingly.
|
||||
func parseExtensions(scan *scanner) int {
|
||||
start := scan.start
|
||||
exts := [][]byte{}
|
||||
private := []byte{}
|
||||
end := scan.end
|
||||
for len(scan.token) == 1 {
|
||||
extStart := scan.start
|
||||
ext := scan.token[0]
|
||||
end = parseExtension(scan)
|
||||
extension := scan.b[extStart:end]
|
||||
if len(extension) < 3 || (ext != 'x' && len(extension) < 4) {
|
||||
scan.setError(ErrSyntax)
|
||||
end = extStart
|
||||
continue
|
||||
} else if start == extStart && (ext == 'x' || scan.start == len(scan.b)) {
|
||||
scan.b = scan.b[:end]
|
||||
return end
|
||||
} else if ext == 'x' {
|
||||
private = extension
|
||||
break
|
||||
}
|
||||
exts = append(exts, extension)
|
||||
}
|
||||
sort.Sort(bytesSort{exts, 1})
|
||||
if len(private) > 0 {
|
||||
exts = append(exts, private)
|
||||
}
|
||||
scan.b = scan.b[:start]
|
||||
if len(exts) > 0 {
|
||||
scan.b = append(scan.b, bytes.Join(exts, separator)...)
|
||||
} else if start > 0 {
|
||||
// Strip trailing '-'.
|
||||
scan.b = scan.b[:start-1]
|
||||
}
|
||||
return end
|
||||
}
|
||||
|
||||
// parseExtension parses a single extension and returns the position of
|
||||
// the extension end.
|
||||
func parseExtension(scan *scanner) int {
|
||||
start, end := scan.start, scan.end
|
||||
switch scan.token[0] {
|
||||
case 'u': // https://www.ietf.org/rfc/rfc6067.txt
|
||||
attrStart := end
|
||||
scan.scan()
|
||||
for last := []byte{}; len(scan.token) > 2; scan.scan() {
|
||||
if bytes.Compare(scan.token, last) != -1 {
|
||||
// Attributes are unsorted. Start over from scratch.
|
||||
p := attrStart + 1
|
||||
scan.next = p
|
||||
attrs := [][]byte{}
|
||||
for scan.scan(); len(scan.token) > 2; scan.scan() {
|
||||
attrs = append(attrs, scan.token)
|
||||
end = scan.end
|
||||
}
|
||||
sort.Sort(bytesSort{attrs, 3})
|
||||
copy(scan.b[p:], bytes.Join(attrs, separator))
|
||||
break
|
||||
}
|
||||
last = scan.token
|
||||
end = scan.end
|
||||
}
|
||||
// Scan key-type sequences. A key is of length 2 and may be followed
|
||||
// by 0 or more "type" subtags from 3 to the maximum of 8 letters.
|
||||
var last, key []byte
|
||||
for attrEnd := end; len(scan.token) == 2; last = key {
|
||||
key = scan.token
|
||||
end = scan.end
|
||||
for scan.scan(); end < scan.end && len(scan.token) > 2; scan.scan() {
|
||||
end = scan.end
|
||||
}
|
||||
// TODO: check key value validity
|
||||
if bytes.Compare(key, last) != 1 || scan.err != nil {
|
||||
// We have an invalid key or the keys are not sorted.
|
||||
// Start scanning keys from scratch and reorder.
|
||||
p := attrEnd + 1
|
||||
scan.next = p
|
||||
keys := [][]byte{}
|
||||
for scan.scan(); len(scan.token) == 2; {
|
||||
keyStart := scan.start
|
||||
end = scan.end
|
||||
for scan.scan(); end < scan.end && len(scan.token) > 2; scan.scan() {
|
||||
end = scan.end
|
||||
}
|
||||
keys = append(keys, scan.b[keyStart:end])
|
||||
}
|
||||
sort.Stable(bytesSort{keys, 2})
|
||||
if n := len(keys); n > 0 {
|
||||
k := 0
|
||||
for i := 1; i < n; i++ {
|
||||
if !bytes.Equal(keys[k][:2], keys[i][:2]) {
|
||||
k++
|
||||
keys[k] = keys[i]
|
||||
} else if !bytes.Equal(keys[k], keys[i]) {
|
||||
scan.setError(ErrDuplicateKey)
|
||||
}
|
||||
}
|
||||
keys = keys[:k+1]
|
||||
}
|
||||
reordered := bytes.Join(keys, separator)
|
||||
if e := p + len(reordered); e < end {
|
||||
scan.deleteRange(e, end)
|
||||
end = e
|
||||
}
|
||||
copy(scan.b[p:], reordered)
|
||||
break
|
||||
}
|
||||
}
|
||||
case 't': // https://www.ietf.org/rfc/rfc6497.txt
|
||||
scan.scan()
|
||||
if n := len(scan.token); n >= 2 && n <= 3 && isAlpha(scan.token[1]) {
|
||||
_, end = parseTag(scan, false)
|
||||
scan.toLower(start, end)
|
||||
}
|
||||
for len(scan.token) == 2 && !isAlpha(scan.token[1]) {
|
||||
end = scan.acceptMinSize(3)
|
||||
}
|
||||
case 'x':
|
||||
end = scan.acceptMinSize(1)
|
||||
default:
|
||||
end = scan.acceptMinSize(2)
|
||||
}
|
||||
return end
|
||||
}
|
||||
|
||||
// getExtension returns the name, body and end position of the extension.
|
||||
func getExtension(s string, p int) (end int, ext string) {
|
||||
if s[p] == '-' {
|
||||
p++
|
||||
}
|
||||
if s[p] == 'x' {
|
||||
return len(s), s[p:]
|
||||
}
|
||||
end = nextExtension(s, p)
|
||||
return end, s[p:end]
|
||||
}
|
||||
|
||||
// nextExtension finds the next extension within the string, searching
|
||||
// for the -<char>- pattern from position p.
|
||||
// In the fast majority of cases, language tags will have at most
|
||||
// one extension and extensions tend to be small.
|
||||
func nextExtension(s string, p int) int {
|
||||
for n := len(s) - 3; p < n; {
|
||||
if s[p] == '-' {
|
||||
if s[p+2] == '-' {
|
||||
return p
|
||||
}
|
||||
p += 3
|
||||
} else {
|
||||
p++
|
||||
}
|
||||
}
|
||||
return len(s)
|
||||
}
|
||||
3494
vendor/golang.org/x/text/internal/language/tables.go
generated
vendored
Normal file
3494
vendor/golang.org/x/text/internal/language/tables.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
48
vendor/golang.org/x/text/internal/language/tags.go
generated
vendored
Normal file
48
vendor/golang.org/x/text/internal/language/tags.go
generated
vendored
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package language
|
||||
|
||||
// MustParse is like Parse, but panics if the given BCP 47 tag cannot be parsed.
|
||||
// It simplifies safe initialization of Tag values.
|
||||
func MustParse(s string) Tag {
|
||||
t, err := Parse(s)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// MustParseBase is like ParseBase, but panics if the given base cannot be parsed.
|
||||
// It simplifies safe initialization of Base values.
|
||||
func MustParseBase(s string) Language {
|
||||
b, err := ParseBase(s)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// MustParseScript is like ParseScript, but panics if the given script cannot be
|
||||
// parsed. It simplifies safe initialization of Script values.
|
||||
func MustParseScript(s string) Script {
|
||||
scr, err := ParseScript(s)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return scr
|
||||
}
|
||||
|
||||
// MustParseRegion is like ParseRegion, but panics if the given region cannot be
|
||||
// parsed. It simplifies safe initialization of Region values.
|
||||
func MustParseRegion(s string) Region {
|
||||
r, err := ParseRegion(s)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// Und is the root language.
|
||||
var Und Tag
|
||||
67
vendor/golang.org/x/text/internal/match.go
generated
vendored
Normal file
67
vendor/golang.org/x/text/internal/match.go
generated
vendored
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package internal
|
||||
|
||||
// This file contains matchers that implement CLDR inheritance.
|
||||
//
|
||||
// See https://unicode.org/reports/tr35/#Locale_Inheritance.
|
||||
//
|
||||
// Some of the inheritance described in this document is already handled by
|
||||
// the cldr package.
|
||||
|
||||
import (
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
||||
// TODO: consider if (some of the) matching algorithm needs to be public after
|
||||
// getting some feel about what is generic and what is specific.
|
||||
|
||||
// NewInheritanceMatcher returns a matcher that matches based on the inheritance
|
||||
// chain.
|
||||
//
|
||||
// The matcher uses canonicalization and the parent relationship to find a
|
||||
// match. The resulting match will always be either Und or a language with the
|
||||
// same language and script as the requested language. It will not match
|
||||
// languages for which there is understood to be mutual or one-directional
|
||||
// intelligibility.
|
||||
//
|
||||
// A Match will indicate an Exact match if the language matches after
|
||||
// canonicalization and High if the matched tag is a parent.
|
||||
func NewInheritanceMatcher(t []language.Tag) *InheritanceMatcher {
|
||||
tags := &InheritanceMatcher{make(map[language.Tag]int)}
|
||||
for i, tag := range t {
|
||||
ct, err := language.All.Canonicalize(tag)
|
||||
if err != nil {
|
||||
ct = tag
|
||||
}
|
||||
tags.index[ct] = i
|
||||
}
|
||||
return tags
|
||||
}
|
||||
|
||||
type InheritanceMatcher struct {
|
||||
index map[language.Tag]int
|
||||
}
|
||||
|
||||
func (m InheritanceMatcher) Match(want ...language.Tag) (language.Tag, int, language.Confidence) {
|
||||
for _, t := range want {
|
||||
ct, err := language.All.Canonicalize(t)
|
||||
if err != nil {
|
||||
ct = t
|
||||
}
|
||||
conf := language.Exact
|
||||
for {
|
||||
if index, ok := m.index[ct]; ok {
|
||||
return ct, index, conf
|
||||
}
|
||||
if ct == language.Und {
|
||||
break
|
||||
}
|
||||
ct = ct.Parent()
|
||||
conf = language.High
|
||||
}
|
||||
}
|
||||
return language.Und, 0, language.No
|
||||
}
|
||||
55
vendor/golang.org/x/text/internal/number/common.go
generated
vendored
Normal file
55
vendor/golang.org/x/text/internal/number/common.go
generated
vendored
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
|
||||
|
||||
package number
|
||||
|
||||
import (
|
||||
"unicode/utf8"
|
||||
|
||||
"golang.org/x/text/internal/language/compact"
|
||||
)
|
||||
|
||||
// A system identifies a CLDR numbering system.
|
||||
type system byte
|
||||
|
||||
type systemData struct {
|
||||
id system
|
||||
digitSize byte // number of UTF-8 bytes per digit
|
||||
zero [utf8.UTFMax]byte // UTF-8 sequence of zero digit.
|
||||
}
|
||||
|
||||
// A SymbolType identifies a symbol of a specific kind.
|
||||
type SymbolType int
|
||||
|
||||
const (
|
||||
SymDecimal SymbolType = iota
|
||||
SymGroup
|
||||
SymList
|
||||
SymPercentSign
|
||||
SymPlusSign
|
||||
SymMinusSign
|
||||
SymExponential
|
||||
SymSuperscriptingExponent
|
||||
SymPerMille
|
||||
SymInfinity
|
||||
SymNan
|
||||
SymTimeSeparator
|
||||
|
||||
NumSymbolTypes
|
||||
)
|
||||
|
||||
const hasNonLatnMask = 0x8000
|
||||
|
||||
// symOffset is an offset into altSymData if the bit indicated by hasNonLatnMask
|
||||
// is not 0 (with this bit masked out), and an offset into symIndex otherwise.
|
||||
//
|
||||
// TODO: this type can be a byte again if we use an indirection into altsymData
|
||||
// and introduce an alt -> offset slice (the length of this will be number of
|
||||
// alternatives plus 1). This also allows getting rid of the compactTag field
|
||||
// in altSymData. In total this will save about 1K.
|
||||
type symOffset uint16
|
||||
|
||||
type altSymData struct {
|
||||
compactTag compact.ID
|
||||
symIndex symOffset
|
||||
system system
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue