mirror of
https://github.com/yannh/kubeconform.git
synced 2026-02-11 14:09:21 +00:00
Compare commits
8 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e60892483e | ||
|
|
c7f8490e52 | ||
|
|
e65429b1e5 | ||
|
|
a23275d5ca | ||
|
|
3134f4477e | ||
|
|
9f04fec268 | ||
|
|
31e9679c96 | ||
|
|
df26febc54 |
148 changed files with 49084 additions and 5484 deletions
|
|
@ -1,4 +1,4 @@
|
|||
FROM alpine:3.20.2 as certs
|
||||
FROM alpine:3.21.3 as certs
|
||||
RUN apk add ca-certificates
|
||||
|
||||
FROM scratch AS kubeconform
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
FROM bats/bats:1.11.0
|
||||
RUN apk --no-cache add ca-certificates parallel libxml2-utils
|
||||
COPY dist/kubeconform_linux_amd64_v1/kubeconform /code/bin/
|
||||
COPY bin/kubeconform /code/bin/
|
||||
COPY acceptance.bats acceptance-nonetwork.bats /code/
|
||||
COPY fixtures /code/fixtures
|
||||
|
|
|
|||
10
Makefile
10
Makefile
|
|
@ -16,13 +16,13 @@ local-build-static:
|
|||
|
||||
# These only used for development. Release artifacts and docker images are produced by goreleaser.
|
||||
docker-test:
|
||||
docker run -t -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform golang:1.22.5 make local-test
|
||||
docker run -t -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform golang:1.24.3 make local-test
|
||||
|
||||
docker-build:
|
||||
docker run -t -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform golang:1.22.5 make local-build
|
||||
docker run -t -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform golang:1.24.3 make local-build
|
||||
|
||||
docker-build-static:
|
||||
docker run -t -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform golang:1.22.5 make local-build-static
|
||||
docker run -t -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform golang:1.24.3 make local-build-static
|
||||
|
||||
build-bats:
|
||||
docker build -t bats -f Dockerfile.bats .
|
||||
|
|
@ -32,11 +32,11 @@ docker-acceptance: build-bats
|
|||
docker run --network none -t bats -p acceptance-nonetwork.bats
|
||||
|
||||
goreleaser-build-static:
|
||||
docker run -t -e GOOS=linux -e GOARCH=amd64 -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform goreleaser/goreleaser:v2.1.0 build --clean --single-target --snapshot
|
||||
docker run -t -e GOOS=linux -e GOARCH=amd64 -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform goreleaser/goreleaser:v2.9.0 build --clean --single-target --snapshot
|
||||
cp dist/kubeconform_linux_amd64_v1/kubeconform bin/
|
||||
|
||||
release:
|
||||
docker run -e GITHUB_TOKEN -e GIT_OWNER -t -v /var/run/docker.sock:/var/run/docker.sock -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform goreleaser/goreleaser:v2.1.0 release --clean
|
||||
docker run -e GITHUB_TOKEN -e GIT_OWNER -t -v /var/run/docker.sock:/var/run/docker.sock -v $$PWD:/go/src/github.com/yannh/kubeconform -w /go/src/github.com/yannh/kubeconform goreleaser/goreleaser:v2.9.0 release --clean
|
||||
|
||||
update-deps:
|
||||
go get -u ./...
|
||||
|
|
|
|||
|
|
@ -19,3 +19,8 @@
|
|||
run bin/kubeconform -schema-location 'fixtures/{{ .ResourceKind }}.json' -schema-location './fixtures/registry/{{ .ResourceKind }}{{ .KindSuffix }}.json' fixtures/test_crd.yaml
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
@test "Pass when using a cached schema with external references" {
|
||||
run bin/kubeconform -cache fixtures/cache -summary -schema-location 'https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/{{ .NormalizedKubernetesVersion }}{{ .StrictSuffix }}/{{ .ResourceKind }}{{ .KindSuffix }}.json' fixtures/valid.yaml
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -276,6 +276,13 @@ resetCacheFolder() {
|
|||
[ "$output" = "failed opening cache folder cache_does_not_exist: stat cache_does_not_exist: no such file or directory" ]
|
||||
}
|
||||
|
||||
@test "HTTP references should be cached" {
|
||||
resetCacheFolder
|
||||
run bin/kubeconform -cache cache -summary -schema-location 'https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/{{ .NormalizedKubernetesVersion }}{{ .StrictSuffix }}/{{ .ResourceKind }}{{ .KindSuffix }}.json' fixtures/valid.yaml
|
||||
[ "$status" -eq 0 ]
|
||||
[ "`ls cache/ | wc -l`" -eq 2 ]
|
||||
}
|
||||
|
||||
@test "Produces correct TAP output" {
|
||||
run bin/kubeconform -output tap fixtures/valid.yaml
|
||||
[ "$status" -eq 0 ]
|
||||
|
|
@ -299,14 +306,14 @@ resetCacheFolder() {
|
|||
@test "Fail when parsing a List that contains an invalid resource" {
|
||||
run bin/kubeconform -summary fixtures/list_invalid.yaml
|
||||
[ "$status" -eq 1 ]
|
||||
[ "${lines[0]}" == 'fixtures/list_invalid.yaml - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema: '\''/spec/replicas'\'' does not validate with https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#/properties/spec/properties/replicas/type: expected integer or null, but got string' ]
|
||||
[ "${lines[0]}" == 'fixtures/list_invalid.yaml - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema validation failed with '\''https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#'\'' - at '\''/spec/replicas'\'': got string, want null or integer' ]
|
||||
[ "${lines[1]}" == 'Summary: 2 resources found in 1 file - Valid: 1, Invalid: 1, Errors: 0, Skipped: 0' ]
|
||||
}
|
||||
|
||||
@test "Fail when parsing a List that contains an invalid resource from stdin" {
|
||||
run bash -c "cat fixtures/list_invalid.yaml | bin/kubeconform -summary -"
|
||||
[ "$status" -eq 1 ]
|
||||
[ "${lines[0]}" == 'stdin - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema: '\''/spec/replicas'\'' does not validate with https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#/properties/spec/properties/replicas/type: expected integer or null, but got string' ]
|
||||
[ "${lines[0]}" == 'stdin - ReplicationController bob is invalid: problem validating schema. Check JSON formatting: jsonschema validation failed with '\''https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master-standalone/replicationcontroller-v1.json#'\'' - at '\''/spec/replicas'\'': got string, want null or integer' ]
|
||||
[ "${lines[1]}" == 'Summary: 2 resources found parsing stdin - Valid: 1, Invalid: 1, Errors: 0, Skipped: 0' ]
|
||||
}
|
||||
|
||||
|
|
@ -352,3 +359,9 @@ resetCacheFolder() {
|
|||
run bash -c "bin/kubeconform -schema-location default -schema-location 'https://raw.githubusercontent.com/datreeio/CRDs-catalog/main/{{.Group}}/{{.ResourceKind}}_{{.ResourceAPIVersion}}.json' fixtures/httpproxy.yaml"
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
||||
# https://github.com/yannh/kubeconform/pull/309
|
||||
@test "passes when validating duration not in ISO8601" {
|
||||
run bash -c "./bin/kubeconform -schema-location ./fixtures/grafanaalertrulegroup_v1beta1.json ./fixtures/grafana-alert-rule-group-sample.yaml"
|
||||
[ "$status" -eq 0 ]
|
||||
}
|
||||
|
|
|
|||
46
fixtures/cache/603105c17f981119fec20ae25cfb97ff6dd99114a875ae841ef965d9345667e3
vendored
Normal file
46
fixtures/cache/603105c17f981119fec20ae25cfb97ff6dd99114a875ae841ef965d9345667e3
vendored
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
{
|
||||
"description": "ReplicationController represents the configuration of a replication controller.",
|
||||
"properties": {
|
||||
"apiVersion": {
|
||||
"description": "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
],
|
||||
"enum": [
|
||||
"v1"
|
||||
]
|
||||
},
|
||||
"kind": {
|
||||
"description": "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds",
|
||||
"type": [
|
||||
"string",
|
||||
"null"
|
||||
],
|
||||
"enum": [
|
||||
"ReplicationController"
|
||||
]
|
||||
},
|
||||
"metadata": {
|
||||
"$ref": "https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master/_definitions.json#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta",
|
||||
"description": "If the Labels of a ReplicationController are empty, they are defaulted to be the same as the Pod(s) that the replication controller manages. Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata"
|
||||
},
|
||||
"spec": {
|
||||
"$ref": "https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master/_definitions.json#/definitions/io.k8s.api.core.v1.ReplicationControllerSpec",
|
||||
"description": "Spec defines the specification of the desired behavior of the replication controller. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status"
|
||||
},
|
||||
"status": {
|
||||
"$ref": "https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/master/_definitions.json#/definitions/io.k8s.api.core.v1.ReplicationControllerStatus",
|
||||
"description": "Status is the most recently observed status of the replication controller. This data may be out of date by some window of time. Populated by the system. Read-only. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"x-kubernetes-group-version-kind": [
|
||||
{
|
||||
"group": "",
|
||||
"kind": "ReplicationController",
|
||||
"version": "v1"
|
||||
}
|
||||
],
|
||||
"$schema": "http://json-schema.org/schema#"
|
||||
}
|
||||
22067
fixtures/cache/6dc6142c64b944d783a3e783526114da8e747a14a11d8b32dd1f12e2d89a8330
vendored
Normal file
22067
fixtures/cache/6dc6142c64b944d783a3e783526114da8e747a14a11d8b32dd1f12e2d89a8330
vendored
Normal file
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
62
fixtures/grafana-alert-rule-group-sample.yaml
Normal file
62
fixtures/grafana-alert-rule-group-sample.yaml
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
---
|
||||
apiVersion: grafana.integreatly.org/v1beta1
|
||||
kind: GrafanaAlertRuleGroup
|
||||
metadata:
|
||||
name: grafanaalertrulegroup-sample
|
||||
spec:
|
||||
folderRef: test-folder
|
||||
instanceSelector:
|
||||
matchLabels:
|
||||
dashboards: "grafana"
|
||||
interval: 5m
|
||||
rules:
|
||||
- condition: B
|
||||
data:
|
||||
- datasourceUid: grafanacloud-demoinfra-prom
|
||||
model:
|
||||
datasource:
|
||||
type: prometheus
|
||||
uid: grafanacloud-demoinfra-prom
|
||||
editorMode: code
|
||||
expr: weather_temp_c{}
|
||||
instant: true
|
||||
intervalMs: 1000
|
||||
legendFormat: __auto
|
||||
maxDataPoints: 43200
|
||||
range: false
|
||||
refId: A
|
||||
refId: A
|
||||
relativeTimeRange:
|
||||
from: 600
|
||||
- datasourceUid: __expr__
|
||||
model:
|
||||
conditions:
|
||||
- evaluator:
|
||||
params:
|
||||
- 0
|
||||
type: lt
|
||||
operator:
|
||||
type: and
|
||||
query:
|
||||
params:
|
||||
- C
|
||||
reducer:
|
||||
params: []
|
||||
type: last
|
||||
type: query
|
||||
datasource:
|
||||
type: __expr__
|
||||
uid: __expr__
|
||||
expression: A
|
||||
intervalMs: 1000
|
||||
maxDataPoints: 43200
|
||||
refId: B
|
||||
type: threshold
|
||||
refId: B
|
||||
relativeTimeRange:
|
||||
from: 600
|
||||
execErrState: Error
|
||||
for: 5m0s
|
||||
noDataState: NoData
|
||||
title: Temperature below zero
|
||||
uid: 4843de5c-4f8a-4af0-9509-23526a04faf8
|
||||
334
fixtures/grafanaalertrulegroup_v1beta1.json
Normal file
334
fixtures/grafanaalertrulegroup_v1beta1.json
Normal file
|
|
@ -0,0 +1,334 @@
|
|||
{
|
||||
"description": "GrafanaAlertRuleGroup is the Schema for the grafanaalertrulegroups API",
|
||||
"properties": {
|
||||
"apiVersion": {
|
||||
"description": "APIVersion defines the versioned schema of this representation of an object.\nServers should convert recognized schemas to the latest internal value, and\nmay reject unrecognized values.\nMore info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources",
|
||||
"type": "string"
|
||||
},
|
||||
"kind": {
|
||||
"description": "Kind is a string value representing the REST resource this object represents.\nServers may infer this from the endpoint the client submits requests to.\nCannot be updated.\nIn CamelCase.\nMore info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds",
|
||||
"type": "string"
|
||||
},
|
||||
"metadata": {
|
||||
"type": "object"
|
||||
},
|
||||
"spec": {
|
||||
"description": "GrafanaAlertRuleGroupSpec defines the desired state of GrafanaAlertRuleGroup",
|
||||
"properties": {
|
||||
"allowCrossNamespaceImport": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"editable": {
|
||||
"description": "Whether to enable or disable editing of the alert rule group in Grafana UI",
|
||||
"type": "boolean",
|
||||
"x-kubernetes-validations": [
|
||||
{
|
||||
"message": "Value is immutable",
|
||||
"rule": "self == oldSelf"
|
||||
}
|
||||
]
|
||||
},
|
||||
"folderRef": {
|
||||
"description": "Match GrafanaFolders CRs to infer the uid",
|
||||
"type": "string"
|
||||
},
|
||||
"folderUID": {
|
||||
"description": "UID of the folder containing this rule group\nOverrides the FolderSelector",
|
||||
"type": "string"
|
||||
},
|
||||
"instanceSelector": {
|
||||
"description": "selects Grafanas for import",
|
||||
"properties": {
|
||||
"matchExpressions": {
|
||||
"description": "matchExpressions is a list of label selector requirements. The requirements are ANDed.",
|
||||
"items": {
|
||||
"description": "A label selector requirement is a selector that contains values, a key, and an operator that\nrelates the key and values.",
|
||||
"properties": {
|
||||
"key": {
|
||||
"description": "key is the label key that the selector applies to.",
|
||||
"type": "string"
|
||||
},
|
||||
"operator": {
|
||||
"description": "operator represents a key's relationship to a set of values.\nValid operators are In, NotIn, Exists and DoesNotExist.",
|
||||
"type": "string"
|
||||
},
|
||||
"values": {
|
||||
"description": "values is an array of string values. If the operator is In or NotIn,\nthe values array must be non-empty. If the operator is Exists or DoesNotExist,\nthe values array must be empty. This array is replaced during a strategic\nmerge patch.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array",
|
||||
"x-kubernetes-list-type": "atomic"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"key",
|
||||
"operator"
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false
|
||||
},
|
||||
"type": "array",
|
||||
"x-kubernetes-list-type": "atomic"
|
||||
},
|
||||
"matchLabels": {
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is \"key\", the\noperator is \"In\", and the values array contains only \"value\". The requirements are ANDed.",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"x-kubernetes-map-type": "atomic",
|
||||
"x-kubernetes-validations": [
|
||||
{
|
||||
"message": "Value is immutable",
|
||||
"rule": "self == oldSelf"
|
||||
}
|
||||
],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"interval": {
|
||||
"format": "duration",
|
||||
"pattern": "^([0-9]+(\\.[0-9]+)?(ns|us|\u00b5s|ms|s|m|h))+$",
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"description": "Name of the alert rule group. If not specified, the resource name will be used.",
|
||||
"type": "string"
|
||||
},
|
||||
"resyncPeriod": {
|
||||
"default": "10m",
|
||||
"format": "duration",
|
||||
"pattern": "^([0-9]+(\\.[0-9]+)?(ns|us|\u00b5s|ms|s|m|h))+$",
|
||||
"type": "string"
|
||||
},
|
||||
"rules": {
|
||||
"items": {
|
||||
"description": "AlertRule defines a specific rule to be evaluated. It is based on the upstream model with some k8s specific type mappings",
|
||||
"properties": {
|
||||
"annotations": {
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"condition": {
|
||||
"type": "string"
|
||||
},
|
||||
"data": {
|
||||
"items": {
|
||||
"properties": {
|
||||
"datasourceUid": {
|
||||
"description": "Grafana data source unique identifier; it should be '__expr__' for a Server Side Expression operation.",
|
||||
"type": "string"
|
||||
},
|
||||
"model": {
|
||||
"description": "JSON is the raw JSON query and includes the above properties as well as custom properties.",
|
||||
"x-kubernetes-preserve-unknown-fields": true
|
||||
},
|
||||
"queryType": {
|
||||
"description": "QueryType is an optional identifier for the type of query.\nIt can be used to distinguish different types of queries.",
|
||||
"type": "string"
|
||||
},
|
||||
"refId": {
|
||||
"description": "RefID is the unique identifier of the query, set by the frontend call.",
|
||||
"type": "string"
|
||||
},
|
||||
"relativeTimeRange": {
|
||||
"description": "relative time range",
|
||||
"properties": {
|
||||
"from": {
|
||||
"description": "from",
|
||||
"format": "int64",
|
||||
"type": "integer"
|
||||
},
|
||||
"to": {
|
||||
"description": "to",
|
||||
"format": "int64",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"additionalProperties": false
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"execErrState": {
|
||||
"enum": [
|
||||
"OK",
|
||||
"Alerting",
|
||||
"Error",
|
||||
"KeepLast"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"for": {
|
||||
"format": "duration",
|
||||
"pattern": "^([0-9]+(\\.[0-9]+)?(ns|us|\u00b5s|ms|s|m|h))+$",
|
||||
"type": "string"
|
||||
},
|
||||
"isPaused": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"labels": {
|
||||
"additionalProperties": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "object"
|
||||
},
|
||||
"noDataState": {
|
||||
"enum": [
|
||||
"Alerting",
|
||||
"NoData",
|
||||
"OK",
|
||||
"KeepLast"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"notificationSettings": {
|
||||
"properties": {
|
||||
"group_by": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"group_interval": {
|
||||
"type": "string"
|
||||
},
|
||||
"group_wait": {
|
||||
"type": "string"
|
||||
},
|
||||
"mute_time_intervals": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"receiver": {
|
||||
"type": "string"
|
||||
},
|
||||
"repeat_interval": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"receiver"
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false
|
||||
},
|
||||
"title": {
|
||||
"example": "Always firing",
|
||||
"maxLength": 190,
|
||||
"minLength": 1,
|
||||
"type": "string"
|
||||
},
|
||||
"uid": {
|
||||
"pattern": "^[a-zA-Z0-9-_]+$",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"condition",
|
||||
"data",
|
||||
"execErrState",
|
||||
"for",
|
||||
"noDataState",
|
||||
"title",
|
||||
"uid"
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"instanceSelector",
|
||||
"interval",
|
||||
"rules"
|
||||
],
|
||||
"type": "object",
|
||||
"x-kubernetes-validations": [
|
||||
{
|
||||
"message": "Only one of FolderUID or FolderRef can be set",
|
||||
"rule": "(has(self.folderUID) && !(has(self.folderRef))) || (has(self.folderRef) && !(has(self.folderUID)))"
|
||||
}
|
||||
],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"status": {
|
||||
"description": "GrafanaAlertRuleGroupStatus defines the observed state of GrafanaAlertRuleGroup",
|
||||
"properties": {
|
||||
"conditions": {
|
||||
"items": {
|
||||
"description": "Condition contains details for one aspect of the current state of this API Resource.",
|
||||
"properties": {
|
||||
"lastTransitionTime": {
|
||||
"description": "lastTransitionTime is the last time the condition transitioned from one status to another.\nThis should be when the underlying condition changed. If that is not known, then using the time when the API field changed is acceptable.",
|
||||
"format": "date-time",
|
||||
"type": "string"
|
||||
},
|
||||
"message": {
|
||||
"description": "message is a human readable message indicating details about the transition.\nThis may be an empty string.",
|
||||
"maxLength": 32768,
|
||||
"type": "string"
|
||||
},
|
||||
"observedGeneration": {
|
||||
"description": "observedGeneration represents the .metadata.generation that the condition was set based upon.\nFor instance, if .metadata.generation is currently 12, but the .status.conditions[x].observedGeneration is 9, the condition is out of date\nwith respect to the current state of the instance.",
|
||||
"format": "int64",
|
||||
"minimum": 0,
|
||||
"type": "integer"
|
||||
},
|
||||
"reason": {
|
||||
"description": "reason contains a programmatic identifier indicating the reason for the condition's last transition.\nProducers of specific condition types may define expected values and meanings for this field,\nand whether the values are considered a guaranteed API.\nThe value should be a CamelCase string.\nThis field may not be empty.",
|
||||
"maxLength": 1024,
|
||||
"minLength": 1,
|
||||
"pattern": "^[A-Za-z]([A-Za-z0-9_,:]*[A-Za-z0-9_])?$",
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"description": "status of the condition, one of True, False, Unknown.",
|
||||
"enum": [
|
||||
"True",
|
||||
"False",
|
||||
"Unknown"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"description": "type of condition in CamelCase or in foo.example.com/CamelCase.",
|
||||
"maxLength": 316,
|
||||
"pattern": "^([a-z0-9]([-a-z0-9]*[a-z0-9])?(\\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*/)?(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])$",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"lastTransitionTime",
|
||||
"message",
|
||||
"reason",
|
||||
"status",
|
||||
"type"
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"conditions"
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false
|
||||
}
|
||||
},
|
||||
"type": "object"
|
||||
}
|
||||
5
go.mod
5
go.mod
|
|
@ -1,10 +1,11 @@
|
|||
module github.com/yannh/kubeconform
|
||||
|
||||
go 1.22
|
||||
go 1.24
|
||||
|
||||
require (
|
||||
github.com/hashicorp/go-retryablehttp v0.7.7
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1
|
||||
golang.org/x/text v0.25.0
|
||||
sigs.k8s.io/yaml v1.4.0
|
||||
)
|
||||
|
||||
|
|
|
|||
8
go.sum
8
go.sum
|
|
@ -1,3 +1,5 @@
|
|||
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
|
||||
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
|
||||
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
|
|
@ -12,10 +14,12 @@ github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxec
|
|||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4=
|
||||
github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY=
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw=
|
||||
github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU=
|
||||
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
|
||||
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
|
||||
|
|
|
|||
4
pkg/cache/cache.go
vendored
4
pkg/cache/cache.go
vendored
|
|
@ -1,6 +1,6 @@
|
|||
package cache
|
||||
|
||||
type Cache interface {
|
||||
Get(resourceKind, resourceAPIVersion, k8sVersion string) (interface{}, error)
|
||||
Set(resourceKind, resourceAPIVersion, k8sVersion string, schema interface{}) error
|
||||
Get(key string) (any, error)
|
||||
Set(key string, schema any) error
|
||||
}
|
||||
|
|
|
|||
18
pkg/cache/inmemory.go
vendored
18
pkg/cache/inmemory.go
vendored
|
|
@ -10,26 +10,21 @@ import (
|
|||
// - This cache caches the parsed Schemas
|
||||
type inMemory struct {
|
||||
sync.RWMutex
|
||||
schemas map[string]interface{}
|
||||
schemas map[string]any
|
||||
}
|
||||
|
||||
// New creates a new cache for downloaded schemas
|
||||
func NewInMemoryCache() Cache {
|
||||
return &inMemory{
|
||||
schemas: map[string]interface{}{},
|
||||
schemas: make(map[string]any),
|
||||
}
|
||||
}
|
||||
|
||||
func key(resourceKind, resourceAPIVersion, k8sVersion string) string {
|
||||
return fmt.Sprintf("%s-%s-%s", resourceKind, resourceAPIVersion, k8sVersion)
|
||||
}
|
||||
|
||||
// Get retrieves the JSON schema given a resource signature
|
||||
func (c *inMemory) Get(resourceKind, resourceAPIVersion, k8sVersion string) (interface{}, error) {
|
||||
k := key(resourceKind, resourceAPIVersion, k8sVersion)
|
||||
func (c *inMemory) Get(key string) (any, error) {
|
||||
c.RLock()
|
||||
defer c.RUnlock()
|
||||
schema, ok := c.schemas[k]
|
||||
schema, ok := c.schemas[key]
|
||||
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("schema not found in in-memory cache")
|
||||
|
|
@ -39,11 +34,10 @@ func (c *inMemory) Get(resourceKind, resourceAPIVersion, k8sVersion string) (int
|
|||
}
|
||||
|
||||
// Set adds a JSON schema to the schema cache
|
||||
func (c *inMemory) Set(resourceKind, resourceAPIVersion, k8sVersion string, schema interface{}) error {
|
||||
k := key(resourceKind, resourceAPIVersion, k8sVersion)
|
||||
func (c *inMemory) Set(key string, schema any) error {
|
||||
c.Lock()
|
||||
defer c.Unlock()
|
||||
c.schemas[k] = schema
|
||||
c.schemas[key] = schema
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
17
pkg/cache/ondisk.go
vendored
17
pkg/cache/ondisk.go
vendored
|
|
@ -3,7 +3,6 @@ package cache
|
|||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
|
|
@ -22,17 +21,17 @@ func NewOnDiskCache(cache string) Cache {
|
|||
}
|
||||
}
|
||||
|
||||
func cachePath(folder, resourceKind, resourceAPIVersion, k8sVersion string) string {
|
||||
hash := sha256.Sum256([]byte(fmt.Sprintf("%s-%s-%s", resourceKind, resourceAPIVersion, k8sVersion)))
|
||||
func cachePath(folder, key string) string {
|
||||
hash := sha256.Sum256([]byte(key))
|
||||
return path.Join(folder, hex.EncodeToString(hash[:]))
|
||||
}
|
||||
|
||||
// Get retrieves the JSON schema given a resource signature
|
||||
func (c *onDisk) Get(resourceKind, resourceAPIVersion, k8sVersion string) (interface{}, error) {
|
||||
func (c *onDisk) Get(key string) (any, error) {
|
||||
c.RLock()
|
||||
defer c.RUnlock()
|
||||
|
||||
f, err := os.Open(cachePath(c.folder, resourceKind, resourceAPIVersion, k8sVersion))
|
||||
f, err := os.Open(cachePath(c.folder, key))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -42,8 +41,12 @@ func (c *onDisk) Get(resourceKind, resourceAPIVersion, k8sVersion string) (inter
|
|||
}
|
||||
|
||||
// Set adds a JSON schema to the schema cache
|
||||
func (c *onDisk) Set(resourceKind, resourceAPIVersion, k8sVersion string, schema interface{}) error {
|
||||
func (c *onDisk) Set(key string, schema any) error {
|
||||
c.Lock()
|
||||
defer c.Unlock()
|
||||
return os.WriteFile(cachePath(c.folder, resourceKind, resourceAPIVersion, k8sVersion), schema.([]byte), 0644)
|
||||
|
||||
if _, err := os.Stat(cachePath(c.folder, key)); os.IsNotExist(err) {
|
||||
return os.WriteFile(cachePath(c.folder, key), schema.([]byte), 0644)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
65
pkg/loader/file.go
Normal file
65
pkg/loader/file.go
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"io"
|
||||
gourl "net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// FileLoader loads json file url.
|
||||
type FileLoader struct {
|
||||
cache cache.Cache
|
||||
}
|
||||
|
||||
func (l FileLoader) Load(url string) (any, error) {
|
||||
path, err := l.ToFile(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
msg := fmt.Sprintf("could not open file %s", path)
|
||||
return nil, NewNotFoundError(errors.New(msg))
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
content, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return jsonschema.UnmarshalJSON(bytes.NewReader(content))
|
||||
}
|
||||
|
||||
// ToFile is helper method to convert file url to file path.
|
||||
func (l FileLoader) ToFile(url string) (string, error) {
|
||||
u, err := gourl.Parse(url)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if u.Scheme != "file" {
|
||||
return url, nil
|
||||
}
|
||||
path := u.Path
|
||||
if runtime.GOOS == "windows" {
|
||||
path = strings.TrimPrefix(path, "/")
|
||||
path = filepath.FromSlash(path)
|
||||
}
|
||||
return path, nil
|
||||
}
|
||||
|
||||
func NewFileLoader() *FileLoader {
|
||||
return &FileLoader{}
|
||||
}
|
||||
85
pkg/loader/http.go
Normal file
85
pkg/loader/http.go
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/hashicorp/go-retryablehttp"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
type HTTPURLLoader struct {
|
||||
client http.Client
|
||||
cache cache.Cache
|
||||
}
|
||||
|
||||
func (l *HTTPURLLoader) Load(url string) (any, error) {
|
||||
if l.cache != nil {
|
||||
if cached, err := l.cache.Get(url); err == nil {
|
||||
return jsonschema.UnmarshalJSON(bytes.NewReader(cached.([]byte)))
|
||||
}
|
||||
}
|
||||
|
||||
resp, err := l.client.Get(url)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed downloading schema at %s: %s", url, err)
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNotFound {
|
||||
msg := fmt.Sprintf("could not find schema at %s", url)
|
||||
return nil, NewNotFoundError(errors.New(msg))
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
msg := fmt.Sprintf("error while downloading schema at %s - received HTTP status %d", url, resp.StatusCode)
|
||||
return nil, fmt.Errorf("%s", msg)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed parsing schema from %s: %s", url, err)
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
|
||||
if l.cache != nil {
|
||||
if err = l.cache.Set(url, body); err != nil {
|
||||
return nil, fmt.Errorf("failed to write cache to disk: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(body))
|
||||
if err != nil {
|
||||
return nil, NewNonJSONResponseError(err)
|
||||
}
|
||||
|
||||
return s, nil
|
||||
}
|
||||
|
||||
func NewHTTPURLLoader(skipTLS bool, cache cache.Cache) (*HTTPURLLoader, error) {
|
||||
transport := &http.Transport{
|
||||
MaxIdleConns: 100,
|
||||
IdleConnTimeout: 3 * time.Second,
|
||||
DisableCompression: true,
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
}
|
||||
|
||||
if skipTLS {
|
||||
transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
|
||||
}
|
||||
|
||||
// retriable http client
|
||||
retryClient := retryablehttp.NewClient()
|
||||
retryClient.RetryMax = 2
|
||||
retryClient.HTTPClient = &http.Client{Transport: transport}
|
||||
retryClient.Logger = nil
|
||||
|
||||
httpLoader := HTTPURLLoader{client: *retryClient.StandardClient(), cache: cache}
|
||||
return &httpLoader, nil
|
||||
}
|
||||
216
pkg/loader/http_test.go
Normal file
216
pkg/loader/http_test.go
Normal file
|
|
@ -0,0 +1,216 @@
|
|||
package loader
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"sync"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// mockCache is an in-memory stand-in for cache.Cache used by the tests.
type mockCache struct {
	data map[string]any
}

// Get returns the stored value for key, or an error on a miss.
func (m *mockCache) Get(key string) (any, error) {
	val, ok := m.data[key]
	if !ok {
		return nil, errors.New("cache miss")
	}
	return val, nil
}

// Set stores value under key; it never fails.
func (m *mockCache) Set(key string, value any) error {
	m.data[key] = value
	return nil
}
|
||||
|
||||
// TestHTTPURLLoader_Load exercises HTTPURLLoader.Load against a scripted
// httptest server: success, 404, 500, a pre-seeded cache hit, and a
// truncated (non-JSON) body.
func TestHTTPURLLoader_Load(t *testing.T) {
	tests := []struct {
		name           string
		mockResponse   string // body the server returns
		mockStatusCode int    // status the server returns
		cacheEnabled   bool   // attach a mockCache to the loader
		expectError    bool
		expectCacheHit bool // pre-seed the cache so Load never hits the server
	}{
		{
			name:           "successful load",
			mockResponse:   `{"type": "object"}`,
			mockStatusCode: http.StatusOK,
			cacheEnabled:   false,
			expectError:    false,
		},
		{
			name:           "not found error",
			mockResponse:   "",
			mockStatusCode: http.StatusNotFound,
			cacheEnabled:   false,
			expectError:    true,
		},
		{
			name:           "server error",
			mockResponse:   "",
			mockStatusCode: http.StatusInternalServerError,
			cacheEnabled:   false,
			expectError:    true,
		},
		{
			name:           "cache hit",
			mockResponse:   `{"type": "object"}`,
			mockStatusCode: http.StatusOK,
			cacheEnabled:   true,
			expectError:    false,
			expectCacheHit: true,
		},
		{
			// Truncated JSON: Load must surface a parse error.
			name:           "Partial response from server",
			mockResponse:   `{"type": "objec`,
			mockStatusCode: http.StatusOK,
			cacheEnabled:   false,
			expectError:    true,
			expectCacheHit: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Mock HTTP server answering every path with the scripted
			// status and body.
			server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
				w.WriteHeader(tt.mockStatusCode)
				w.Write([]byte(tt.mockResponse))
			}))
			defer server.Close()

			// Loader using the test server's client; cache is attached
			// below only when the case asks for it.
			loader := &HTTPURLLoader{
				client: *server.Client(),
				cache:  nil,
			}

			if tt.cacheEnabled {
				loader.cache = &mockCache{data: map[string]any{}}
				if tt.expectCacheHit {
					// Seed the cache so Load should return without an
					// HTTP round-trip.
					loader.cache.Set(server.URL, []byte(tt.mockResponse))
				}
			}

			// Call Load and check error expectations; on success the
			// decoded schema must be non-nil.
			res, err := loader.Load(server.URL)
			if tt.expectError {
				if err == nil {
					t.Errorf("expected error, got nil")
				}
			} else {
				if err != nil {
					t.Errorf("unexpected error: %v", err)
				}
				if res == nil {
					t.Errorf("expected non-nil result, got nil")
				}
			}

		})
	}
}
|
||||
|
||||
// TestHTTPURLLoader_Load_Retries verifies the retry behaviour of the
// loader built by NewHTTPURLLoader (RetryMax = 2, i.e. at most 3
// attempts): retries on 503 and on connection resets, and gives up
// after the retry budget is exhausted.
func TestHTTPURLLoader_Load_Retries(t *testing.T) {

	tests := []struct {
		name                string
		url                 string // server path selecting the failure mode
		expectError         bool
		expectCallCount     int // total requests the server should observe
		consecutiveFailures int // how many leading requests fail
	}{
		{
			name:                "retries on 503",
			url:                 "/503",
			expectError:         false,
			expectCallCount:     2,
			consecutiveFailures: 2,
		},
		{
			name:                "fails when hitting max retries",
			url:                 "/503",
			expectError:         true,
			expectCallCount:     3,
			consecutiveFailures: 5,
		},
		{
			name:                "retry on connection reset",
			url:                 "/simulate-reset",
			expectError:         false,
			expectCallCount:     2,
			consecutiveFailures: 1,
		},
		{
			name:                "retry on connection reset",
			url:                 "/simulate-reset",
			expectError:         true,
			expectCallCount:     3,
			consecutiveFailures: 5,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Per-path request counter; guarded because the retrying
			// client may issue requests from other goroutines.
			ccMutex := &sync.Mutex{}
			callCounts := map[string]int{}
			// Mock HTTP server scripting the two failure modes.
			server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
				ccMutex.Lock()
				callCounts[r.URL.Path]++
				callCount := callCounts[r.URL.Path]
				ccMutex.Unlock()

				switch r.URL.Path {
				case "/simulate-reset":
					// Hijack and close the raw connection for the first
					// consecutiveFailures requests, then serve normally.
					if callCount <= tt.consecutiveFailures {
						if hj, ok := w.(http.Hijacker); ok {
							conn, _, err := hj.Hijack()
							if err != nil {
								fmt.Printf("Hijacking failed: %v\n", err)
								return
							}
							conn.Close() // Close the connection to simulate a reset
						}
						return
					}

					w.WriteHeader(http.StatusOK)
					w.Write([]byte(`{"type": "object"}`))

				case "/503":
					// Return 503 until the request counter reaches
					// consecutiveFailures, then 200.
					s := http.StatusServiceUnavailable
					if callCount >= tt.consecutiveFailures {
						s = http.StatusOK
					}
					w.WriteHeader(s)
					w.Write([]byte(`{"type": "object"}`))
				}
			}))
			defer server.Close()

			// Loader under test, with the real retrying client.
			loader, _ := NewHTTPURLLoader(false, nil)

			fullurl := server.URL + tt.url
			// Call Load, then check both the error expectation and the
			// exact number of requests the server observed.
			_, err := loader.Load(fullurl)
			if tt.expectError && err == nil {
				t.Error("expected error, got none")
			}
			if !tt.expectError && err != nil {
				t.Errorf("expected no error, got %v", err)
			}
			ccMutex.Lock()
			if callCounts[tt.url] != tt.expectCallCount {
				t.Errorf("expected %d calls, got: %d", tt.expectCallCount, callCounts[tt.url])
			}
			ccMutex.Unlock()
		})
	}
}
|
||||
22
pkg/loader/loaders.go
Normal file
22
pkg/loader/loaders.go
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
package loader
|
||||
|
||||
// NotFoundError is returned when the registry does not contain a schema for the resource
type NotFoundError struct {
	err error // wrapped cause; surfaced verbatim by Error()
}

// NewNotFoundError wraps err in a *NotFoundError.
func NewNotFoundError(err error) *NotFoundError {
	return &NotFoundError{err}
}

// Error returns the wrapped error's message.
func (e *NotFoundError) Error() string { return e.err.Error() }

// Retryable reports whether the failure is transient; a missing schema
// is permanent, so this is always false.
func (e *NotFoundError) Retryable() bool { return false }
|
||||
|
||||
type NonJSONResponseError struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func NewNonJSONResponseError(err error) *NotFoundError {
|
||||
return &NotFoundError{err}
|
||||
}
|
||||
func (e *NonJSONResponseError) Error() string { return e.err.Error() }
|
||||
func (e *NonJSONResponseError) Retryable() bool { return false }
|
||||
|
|
@ -1,129 +1,34 @@
|
|||
package registry
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
retryablehttp "github.com/hashicorp/go-retryablehttp"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
)
|
||||
|
||||
type httpGetter interface {
|
||||
Get(url string) (resp *http.Response, err error)
|
||||
}
|
||||
|
||||
// SchemaRegistry is a file repository (local or remote) that contains JSON schemas for Kubernetes resources
|
||||
type SchemaRegistry struct {
|
||||
c httpGetter
|
||||
schemaPathTemplate string
|
||||
cache cache.Cache
|
||||
strict bool
|
||||
debug bool
|
||||
loader jsonschema.URLLoader
|
||||
}
|
||||
|
||||
func newHTTPRegistry(schemaPathTemplate string, cacheFolder string, strict bool, skipTLS bool, debug bool) (*SchemaRegistry, error) {
|
||||
reghttp := &http.Transport{
|
||||
MaxIdleConns: 100,
|
||||
IdleConnTimeout: 3 * time.Second,
|
||||
DisableCompression: true,
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
}
|
||||
|
||||
if skipTLS {
|
||||
reghttp.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
|
||||
}
|
||||
|
||||
var filecache cache.Cache = nil
|
||||
if cacheFolder != "" {
|
||||
fi, err := os.Stat(cacheFolder)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed opening cache folder %s: %s", cacheFolder, err)
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return nil, fmt.Errorf("cache folder %s is not a directory", err)
|
||||
}
|
||||
|
||||
filecache = cache.NewOnDiskCache(cacheFolder)
|
||||
}
|
||||
|
||||
// retriable http client
|
||||
retryClient := retryablehttp.NewClient()
|
||||
retryClient.RetryMax = 2
|
||||
retryClient.HTTPClient = &http.Client{Transport: reghttp}
|
||||
retryClient.Logger = nil
|
||||
|
||||
func newHTTPRegistry(schemaPathTemplate string, loader jsonschema.URLLoader, strict bool, debug bool) (*SchemaRegistry, error) {
|
||||
return &SchemaRegistry{
|
||||
c: retryClient.StandardClient(),
|
||||
schemaPathTemplate: schemaPathTemplate,
|
||||
cache: filecache,
|
||||
strict: strict,
|
||||
loader: loader,
|
||||
debug: debug,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// DownloadSchema downloads the schema for a particular resource from an HTTP server
|
||||
func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
|
||||
func (r SchemaRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, any, error) {
|
||||
url, err := schemaPath(r.schemaPathTemplate, resourceKind, resourceAPIVersion, k8sVersion, r.strict)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
if r.cache != nil {
|
||||
if b, err := r.cache.Get(resourceKind, resourceAPIVersion, k8sVersion); err == nil {
|
||||
return url, b.([]byte), nil
|
||||
}
|
||||
}
|
||||
resp, err := r.loader.Load(url)
|
||||
|
||||
resp, err := r.c.Get(url)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed downloading schema at %s: %s", url, err)
|
||||
if r.debug {
|
||||
log.Println(msg)
|
||||
}
|
||||
return url, nil, errors.New(msg)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusNotFound {
|
||||
msg := fmt.Sprintf("could not find schema at %s", url)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return url, nil, newNotFoundError(errors.New(msg))
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
msg := fmt.Sprintf("error while downloading schema at %s - received HTTP status %d", url, resp.StatusCode)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return url, nil, fmt.Errorf(msg)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed parsing schema from %s: %s", url, err)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return url, nil, errors.New(msg)
|
||||
}
|
||||
|
||||
if r.debug {
|
||||
log.Printf("using schema found at %s", url)
|
||||
}
|
||||
|
||||
if r.cache != nil {
|
||||
if err := r.cache.Set(resourceKind, resourceAPIVersion, k8sVersion, body); err != nil {
|
||||
return url, nil, fmt.Errorf("failed writing schema to cache: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
return url, body, nil
|
||||
return url, resp, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,180 +0,0 @@
|
|||
package registry
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type mockHTTPGetter struct {
|
||||
callNumber int
|
||||
httpGet func(mockHTTPGetter, string) (*http.Response, error)
|
||||
}
|
||||
|
||||
func newMockHTTPGetter(f func(mockHTTPGetter, string) (*http.Response, error)) *mockHTTPGetter {
|
||||
return &mockHTTPGetter{
|
||||
callNumber: 0,
|
||||
httpGet: f,
|
||||
}
|
||||
}
|
||||
func (m *mockHTTPGetter) Get(url string) (resp *http.Response, err error) {
|
||||
m.callNumber = m.callNumber + 1
|
||||
return m.httpGet(*m, url)
|
||||
}
|
||||
|
||||
// TestDownloadSchema exercises the HTTP registry end to end against a
// real local server that scripts 404, 500, flaky-503 and
// connection-reset behaviour, checking both returned bytes and exact
// error messages (including the retry client's "giving up" wording).
// NOTE(review): the server binds a random port without checking it is
// free, and Shutdown is called with a nil context — flaky by design of
// its era; kept byte-identical here.
func TestDownloadSchema(t *testing.T) {
	callCounts := map[string]int{}

	// http server to simulate different responses
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		var s int
		callCounts[r.URL.Path]++
		callCount := callCounts[r.URL.Path]

		switch r.URL.Path {
		case "/404":
			s = http.StatusNotFound
		case "/500":
			s = http.StatusInternalServerError
		case "/503":
			// Fail once, then recover.
			if callCount < 2 {
				s = http.StatusServiceUnavailable
			} else {
				s = http.StatusOK // Should succeed on 3rd try
			}

		case "/simulate-reset":
			// Drop the raw connection on the first request, then serve.
			if callCount < 2 {
				if hj, ok := w.(http.Hijacker); ok {
					conn, _, err := hj.Hijack()
					if err != nil {
						fmt.Printf("Hijacking failed: %v\n", err)
						return
					}
					conn.Close() // Close the connection to simulate a reset
				}
				return
			}
			s = http.StatusOK // Should succeed on third try

		default:
			s = http.StatusOK
		}

		w.WriteHeader(s)
		w.Write([]byte(http.StatusText(s)))
	})

	port := fmt.Sprint(rand.Intn(1000) + 9000) // random port
	server := &http.Server{Addr: "127.0.0.1:" + port}
	url := fmt.Sprintf("http://localhost:%s", port)

	go func() {
		if err := server.ListenAndServe(); err != nil {
			fmt.Printf("Failed to start server: %v\n", err)
		}
	}()
	defer server.Shutdown(nil)

	// Wait for the server to start (up to ~1s of 50ms polls).
	for i := 0; i < 20; i++ {
		if _, err := http.Get(url); err == nil {
			break
		}

		if i == 19 {
			t.Error("http server did not start")
			return
		}

		time.Sleep(50 * time.Millisecond)
	}

	for _, testCase := range []struct {
		name                                         string
		schemaPathTemplate                           string
		strict                                       bool
		resourceKind, resourceAPIVersion, k8sversion string
		expect                                       []byte // body DownloadSchema should return
		expectErr                                    error  // exact error, compared by message
	}{
		{
			"retry connection reset by peer",
			fmt.Sprintf("%s/simulate-reset", url),
			true,
			"Deployment",
			"v1",
			"1.18.0",
			[]byte(http.StatusText(http.StatusOK)),
			nil,
		},
		{
			"getting 404",
			fmt.Sprintf("%s/404", url),
			true,
			"Deployment",
			"v1",
			"1.18.0",
			nil,
			fmt.Errorf("could not find schema at %s/404", url),
		},
		{
			"getting 500",
			fmt.Sprintf("%s/500", url),
			true,
			"Deployment",
			"v1",
			"1.18.0",
			nil,
			fmt.Errorf("failed downloading schema at %s/500: Get \"%s/500\": GET %s/500 giving up after 3 attempt(s)", url, url, url),
		},
		{
			"retry 503",
			fmt.Sprintf("%s/503", url),
			true,
			"Deployment",
			"v1",
			"1.18.0",
			[]byte(http.StatusText(http.StatusOK)),
			nil,
		},
		{
			"200",
			url,
			true,
			"Deployment",
			"v1",
			"1.18.0",
			[]byte(http.StatusText(http.StatusOK)),
			nil,
		},
	} {
		callCounts = map[string]int{} // Reinitialise counters

		reg, err := newHTTPRegistry(testCase.schemaPathTemplate, "", testCase.strict, true, true)
		if err != nil {
			t.Errorf("during test '%s': failed to create registry: %s", testCase.name, err)
			continue
		}

		_, res, err := reg.DownloadSchema(testCase.resourceKind, testCase.resourceAPIVersion, testCase.k8sversion)
		// Compare errors by message; both nil is fine, exactly one nil
		// is a failure.
		if err == nil || testCase.expectErr == nil {
			if err == nil && testCase.expectErr != nil {
				t.Errorf("during test '%s': expected error\n%s, got nil", testCase.name, testCase.expectErr)
			}
			if err != nil && testCase.expectErr == nil {
				t.Errorf("during test '%s': expected no error, got\n%s\n", testCase.name, err)
			}
		} else if err.Error() != testCase.expectErr.Error() {
			t.Errorf("during test '%s': expected error\n%s, got:\n%s\n", testCase.name, testCase.expectErr, err)
		}

		if !bytes.Equal(res, testCase.expect) {
			t.Errorf("during test '%s': expected '%s', got '%s'", testCase.name, testCase.expect, res)
		}
	}

}
|
||||
|
|
@ -1,63 +1,33 @@
|
|||
package registry
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
)
|
||||
|
||||
type LocalRegistry struct {
|
||||
pathTemplate string
|
||||
strict bool
|
||||
debug bool
|
||||
loader jsonschema.URLLoader
|
||||
}
|
||||
|
||||
// NewLocalSchemas creates a new "registry", that will serve schemas from files, given a list of schema filenames
|
||||
func newLocalRegistry(pathTemplate string, strict bool, debug bool) (*LocalRegistry, error) {
|
||||
func newLocalRegistry(pathTemplate string, loader jsonschema.URLLoader, strict bool, debug bool) (*LocalRegistry, error) {
|
||||
return &LocalRegistry{
|
||||
pathTemplate,
|
||||
strict,
|
||||
debug,
|
||||
loader,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// DownloadSchema retrieves the schema from a file for the resource
|
||||
func (r LocalRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
|
||||
func (r LocalRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, any, error) {
|
||||
schemaFile, err := schemaPath(r.pathTemplate, resourceKind, resourceAPIVersion, k8sVersion, r.strict)
|
||||
if err != nil {
|
||||
return schemaFile, []byte{}, nil
|
||||
}
|
||||
f, err := os.Open(schemaFile)
|
||||
if err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
msg := fmt.Sprintf("could not open file %s", schemaFile)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return schemaFile, nil, newNotFoundError(errors.New(msg))
|
||||
}
|
||||
|
||||
msg := fmt.Sprintf("failed to open schema at %s: %s", schemaFile, err)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return schemaFile, nil, errors.New(msg)
|
||||
}
|
||||
|
||||
defer f.Close()
|
||||
content, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
msg := fmt.Sprintf("failed to read schema at %s: %s", schemaFile, err)
|
||||
if r.debug {
|
||||
log.Print(msg)
|
||||
}
|
||||
return schemaFile, nil, err
|
||||
}
|
||||
|
||||
if r.debug {
|
||||
log.Printf("using schema found at %s", schemaFile)
|
||||
}
|
||||
return schemaFile, content, nil
|
||||
s, err := r.loader.Load(schemaFile)
|
||||
return schemaFile, s, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,6 +3,9 @@ package registry
|
|||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"github.com/yannh/kubeconform/pkg/loader"
|
||||
"os"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
|
@ -13,25 +16,9 @@ type Manifest struct {
|
|||
|
||||
// Registry is an interface that should be implemented by any source of Kubernetes schemas
|
||||
type Registry interface {
|
||||
DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error)
|
||||
DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, any, error)
|
||||
}
|
||||
|
||||
// Retryable indicates whether an error is a temporary or a permanent failure
|
||||
type Retryable interface {
|
||||
IsNotFound() bool
|
||||
}
|
||||
|
||||
// NotFoundError is returned when the registry does not contain a schema for the resource
|
||||
type NotFoundError struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func newNotFoundError(err error) *NotFoundError {
|
||||
return &NotFoundError{err}
|
||||
}
|
||||
func (e *NotFoundError) Error() string { return e.err.Error() }
|
||||
func (e *NotFoundError) Retryable() bool { return false }
|
||||
|
||||
func schemaPath(tpl, resourceKind, resourceAPIVersion, k8sVersion string, strict bool) (string, error) {
|
||||
normalisedVersion := k8sVersion
|
||||
if normalisedVersion != "master" {
|
||||
|
|
@ -81,7 +68,7 @@ func schemaPath(tpl, resourceKind, resourceAPIVersion, k8sVersion string, strict
|
|||
return buf.String(), nil
|
||||
}
|
||||
|
||||
func New(schemaLocation string, cache string, strict bool, skipTLS bool, debug bool) (Registry, error) {
|
||||
func New(schemaLocation string, cacheFolder string, strict bool, skipTLS bool, debug bool) (Registry, error) {
|
||||
if schemaLocation == "default" {
|
||||
schemaLocation = "https://raw.githubusercontent.com/yannh/kubernetes-json-schema/master/{{ .NormalizedKubernetesVersion }}-standalone{{ .StrictSuffix }}/{{ .ResourceKind }}{{ .KindSuffix }}.json"
|
||||
} else if !strings.HasSuffix(schemaLocation, "json") { // If we dont specify a full templated path, we assume the paths of our fork of kubernetes-json-schema
|
||||
|
|
@ -93,9 +80,27 @@ func New(schemaLocation string, cache string, strict bool, skipTLS bool, debug b
|
|||
return nil, fmt.Errorf("failed initialising schema location registry: %s", err)
|
||||
}
|
||||
|
||||
if strings.HasPrefix(schemaLocation, "http") {
|
||||
return newHTTPRegistry(schemaLocation, cache, strict, skipTLS, debug)
|
||||
var c cache.Cache = nil
|
||||
if cacheFolder != "" {
|
||||
fi, err := os.Stat(cacheFolder)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed opening cache folder %s: %s", cacheFolder, err)
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return nil, fmt.Errorf("cache folder %s is not a directory", err)
|
||||
}
|
||||
|
||||
c = cache.NewOnDiskCache(cacheFolder)
|
||||
}
|
||||
|
||||
return newLocalRegistry(schemaLocation, strict, debug)
|
||||
if strings.HasPrefix(schemaLocation, "http") {
|
||||
httpLoader, err := loader.NewHTTPURLLoader(skipTLS, c)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed creating HTTP loader: %s", err)
|
||||
}
|
||||
return newHTTPRegistry(schemaLocation, httpLoader, strict, debug)
|
||||
}
|
||||
|
||||
fileLoader := loader.NewFileLoader()
|
||||
return newLocalRegistry(schemaLocation, fileLoader, strict, debug)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,18 +2,21 @@
|
|||
package validator
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
jsonschema "github.com/santhosh-tekuri/jsonschema/v5"
|
||||
_ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
jsonschema "github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"github.com/yannh/kubeconform/pkg/cache"
|
||||
"github.com/yannh/kubeconform/pkg/loader"
|
||||
"github.com/yannh/kubeconform/pkg/registry"
|
||||
"github.com/yannh/kubeconform/pkg/resource"
|
||||
"golang.org/x/text/language"
|
||||
"golang.org/x/text/message"
|
||||
"io"
|
||||
"os"
|
||||
"sigs.k8s.io/yaml"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Different types of validation results
|
||||
|
|
@ -60,7 +63,7 @@ type Opts struct {
|
|||
SkipKinds map[string]struct{} // List of resource Kinds to ignore
|
||||
RejectKinds map[string]struct{} // List of resource Kinds to reject
|
||||
KubernetesVersion string // Kubernetes Version - has to match one in https://github.com/instrumenta/kubernetes-json-schema
|
||||
Strict bool // thros an error if resources contain undocumented fields
|
||||
Strict bool // Throws an error if resources contain undocumented fields
|
||||
IgnoreMissingSchemas bool // skip a resource if no schema for that resource can be found
|
||||
}
|
||||
|
||||
|
|
@ -92,19 +95,48 @@ func New(schemaLocations []string, opts Opts) (Validator, error) {
|
|||
opts.RejectKinds = map[string]struct{}{}
|
||||
}
|
||||
|
||||
var filecache cache.Cache = nil
|
||||
if opts.Cache != "" {
|
||||
fi, err := os.Stat(opts.Cache)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed opening cache folder %s: %s", opts.Cache, err)
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return nil, fmt.Errorf("cache folder %s is not a directory", err)
|
||||
}
|
||||
|
||||
filecache = cache.NewOnDiskCache(opts.Cache)
|
||||
}
|
||||
|
||||
httpLoader, err := loader.NewHTTPURLLoader(false, filecache)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed creating HTTP loader: %s", err)
|
||||
}
|
||||
|
||||
return &v{
|
||||
opts: opts,
|
||||
schemaDownload: downloadSchema,
|
||||
schemaCache: cache.NewInMemoryCache(),
|
||||
regs: registries,
|
||||
opts: opts,
|
||||
schemaDownload: downloadSchema,
|
||||
schemaMemoryCache: cache.NewInMemoryCache(),
|
||||
regs: registries,
|
||||
loader: jsonschema.SchemeURLLoader{
|
||||
"file": jsonschema.FileLoader{},
|
||||
"http": httpLoader,
|
||||
"https": httpLoader,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
type v struct {
|
||||
opts Opts
|
||||
schemaCache cache.Cache
|
||||
schemaDownload func(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error)
|
||||
regs []registry.Registry
|
||||
opts Opts
|
||||
schemaDiskCache cache.Cache
|
||||
schemaMemoryCache cache.Cache
|
||||
schemaDownload func(registries []registry.Registry, loader jsonschema.SchemeURLLoader, kind, version, k8sVersion string) (*jsonschema.Schema, error)
|
||||
regs []registry.Registry
|
||||
loader jsonschema.SchemeURLLoader
|
||||
}
|
||||
|
||||
func key(resourceKind, resourceAPIVersion, k8sVersion string) string {
|
||||
return fmt.Sprintf("%s-%s-%s", resourceKind, resourceAPIVersion, k8sVersion)
|
||||
}
|
||||
|
||||
// ValidateResource validates a single resource. This allows to validate
|
||||
|
|
@ -165,8 +197,8 @@ func (val *v) ValidateResource(res resource.Resource) Result {
|
|||
cached := false
|
||||
var schema *jsonschema.Schema
|
||||
|
||||
if val.schemaCache != nil {
|
||||
s, err := val.schemaCache.Get(sig.Kind, sig.Version, val.opts.KubernetesVersion)
|
||||
if val.schemaMemoryCache != nil {
|
||||
s, err := val.schemaMemoryCache.Get(key(sig.Kind, sig.Version, val.opts.KubernetesVersion))
|
||||
if err == nil {
|
||||
cached = true
|
||||
schema = s.(*jsonschema.Schema)
|
||||
|
|
@ -174,12 +206,12 @@ func (val *v) ValidateResource(res resource.Resource) Result {
|
|||
}
|
||||
|
||||
if !cached {
|
||||
if schema, err = val.schemaDownload(val.regs, sig.Kind, sig.Version, val.opts.KubernetesVersion); err != nil {
|
||||
if schema, err = val.schemaDownload(val.regs, val.loader, sig.Kind, sig.Version, val.opts.KubernetesVersion); err != nil {
|
||||
return Result{Resource: res, Err: err, Status: Error}
|
||||
}
|
||||
|
||||
if val.schemaCache != nil {
|
||||
val.schemaCache.Set(sig.Kind, sig.Version, val.opts.KubernetesVersion, schema)
|
||||
if val.schemaMemoryCache != nil {
|
||||
val.schemaMemoryCache.Set(key(sig.Kind, sig.Version, val.opts.KubernetesVersion), schema)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -197,17 +229,22 @@ func (val *v) ValidateResource(res resource.Resource) Result {
|
|||
var e *jsonschema.ValidationError
|
||||
if errors.As(err, &e) {
|
||||
for _, ve := range e.Causes {
|
||||
path := ""
|
||||
for _, f := range ve.InstanceLocation {
|
||||
path = path + "/" + f
|
||||
}
|
||||
validationErrors = append(validationErrors, ValidationError{
|
||||
Path: ve.InstanceLocation,
|
||||
Msg: ve.Message,
|
||||
Path: path,
|
||||
Msg: ve.ErrorKind.LocalizedString(message.NewPrinter(language.English)),
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return Result{
|
||||
Resource: res,
|
||||
Status: Invalid,
|
||||
Err: fmt.Errorf("problem validating schema. Check JSON formatting: %s", err),
|
||||
Err: fmt.Errorf("problem validating schema. Check JSON formatting: %s", strings.ReplaceAll(err.Error(), "\n", " ")),
|
||||
ValidationErrors: validationErrors,
|
||||
}
|
||||
}
|
||||
|
|
@ -248,17 +285,98 @@ func (val *v) Validate(filename string, r io.ReadCloser) []Result {
|
|||
return val.ValidateWithContext(context.Background(), filename, r)
|
||||
}
|
||||
|
||||
func downloadSchema(registries []registry.Registry, kind, version, k8sVersion string) (*jsonschema.Schema, error) {
|
||||
// validateDuration is a custom validator for the duration format
// as JSONSchema only supports the ISO 8601 format, i.e. `PT1H30M`,
// while Kubernetes API machinery expects the Go duration format, i.e. `1h30m`
// which is commonly used in Kubernetes operators for specifying intervals.
// https://github.com/kubernetes/apiextensions-apiserver/blob/1ecd29f74da0639e2e6e3b8fac0c9bfd217e05eb/pkg/apis/apiextensions/v1/types_jsonschema.go#L71
//
// Bug fix: the type assertion v.(string) previously ran *before* the
// checked assertion, so a non-string value (e.g. a number) panicked
// instead of being ignored. Non-string values now return nil and are
// left to other schema keywords to reject.
func validateDuration(v any) error {
	s, ok := v.(string)
	if !ok {
		return nil
	}

	// Try validation with the Go duration format first ("1h30m").
	if _, err := time.ParseDuration(s); err == nil {
		return nil
	}

	// Otherwise require ISO 8601: must start with 'P'
	s, ok = strings.CutPrefix(s, "P")
	if !ok {
		return fmt.Errorf("must start with P")
	}
	if s == "" {
		return fmt.Errorf("nothing after P")
	}

	// dur-week: "PnW" is a whole duration on its own (digits only).
	if s, ok := strings.CutSuffix(s, "W"); ok {
		if s == "" {
			return fmt.Errorf("no number in week")
		}
		for _, ch := range s {
			if ch < '0' || ch > '9' {
				return fmt.Errorf("invalid week")
			}
		}
		return nil
	}

	// Date part uses units Y/M/D, time part (after 'T') uses H/M/S.
	// Within each part, units must appear in order and at most once;
	// consuming units[j+1:] below enforces that.
	allUnits := []string{"YMD", "HMS"}
	for i, s := range strings.Split(s, "T") {
		if i != 0 && s == "" {
			return fmt.Errorf("no time elements")
		}
		if i >= len(allUnits) {
			return fmt.Errorf("more than one T")
		}
		units := allUnits[i]
		for s != "" {
			// Each element is <digits><unit>.
			digitCount := 0
			for _, ch := range s {
				if ch >= '0' && ch <= '9' {
					digitCount++
				} else {
					break
				}
			}
			if digitCount == 0 {
				return fmt.Errorf("missing number")
			}
			s = s[digitCount:]
			if s == "" {
				return fmt.Errorf("missing unit")
			}
			unit := s[0]
			j := strings.IndexByte(units, unit)
			if j == -1 {
				// Distinguish "valid unit in the wrong position" from
				// "not a unit of this part at all".
				if strings.IndexByte(allUnits[i], unit) != -1 {
					return fmt.Errorf("unit %q out of order", unit)
				}
				return fmt.Errorf("invalid unit %q", unit)
			}
			units = units[j+1:]
			s = s[1:]
		}
	}

	return nil
}
|
||||
|
||||
func downloadSchema(registries []registry.Registry, l jsonschema.SchemeURLLoader, kind, version, k8sVersion string) (*jsonschema.Schema, error) {
|
||||
var err error
|
||||
var schemaBytes []byte
|
||||
var path string
|
||||
var s any
|
||||
|
||||
for _, reg := range registries {
|
||||
path, schemaBytes, err = reg.DownloadSchema(kind, version, k8sVersion)
|
||||
path, s, err = reg.DownloadSchema(kind, version, k8sVersion)
|
||||
if err == nil {
|
||||
c := jsonschema.NewCompiler()
|
||||
c.Draft = jsonschema.Draft4
|
||||
if err := c.AddResource(path, bytes.NewReader(schemaBytes)); err != nil {
|
||||
c.RegisterFormat(&jsonschema.Format{"duration", validateDuration})
|
||||
c.UseLoader(l)
|
||||
c.DefaultDraft(jsonschema.Draft4)
|
||||
if err := c.AddResource(path, s); err != nil {
|
||||
continue
|
||||
}
|
||||
schema, err := c.Compile(path)
|
||||
|
|
@ -266,11 +384,13 @@ func downloadSchema(registries []registry.Registry, kind, version, k8sVersion st
|
|||
if err != nil {
|
||||
continue
|
||||
}
|
||||
return schema, err
|
||||
return schema, nil
|
||||
}
|
||||
|
||||
// If we get a 404, we try the next registry, but we exit if we get a real failure
|
||||
if _, notfound := err.(*registry.NotFoundError); notfound {
|
||||
if _, notfound := err.(*loader.NotFoundError); notfound {
|
||||
continue
|
||||
}
|
||||
if _, nonJSONError := err.(*loader.NonJSONResponseError); nonJSONError {
|
||||
continue
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -2,6 +2,8 @@ package validator
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/santhosh-tekuri/jsonschema/v6"
|
||||
"github.com/yannh/kubeconform/pkg/loader"
|
||||
"io"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
|
@ -12,16 +14,16 @@ import (
|
|||
)
|
||||
|
||||
type mockRegistry struct {
|
||||
SchemaDownloader func() (string, []byte, error)
|
||||
SchemaDownloader func() (string, any, error)
|
||||
}
|
||||
|
||||
func newMockRegistry(f func() (string, []byte, error)) *mockRegistry {
|
||||
func newMockRegistry(f func() (string, any, error)) *mockRegistry {
|
||||
return &mockRegistry{
|
||||
SchemaDownloader: f,
|
||||
}
|
||||
}
|
||||
|
||||
func (m mockRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, []byte, error) {
|
||||
func (m mockRegistry) DownloadSchema(resourceKind, resourceAPIVersion, k8sVersion string) (string, any, error) {
|
||||
return m.SchemaDownloader()
|
||||
}
|
||||
|
||||
|
|
@ -106,7 +108,7 @@ lastName: bar
|
|||
[]ValidationError{
|
||||
{
|
||||
Path: "/firstName",
|
||||
Msg: "expected number, but got string",
|
||||
Msg: "got string, want number",
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -145,7 +147,7 @@ firstName: foo
|
|||
[]ValidationError{
|
||||
{
|
||||
Path: "",
|
||||
Msg: "missing properties: 'lastName'",
|
||||
Msg: "missing property 'lastName'",
|
||||
},
|
||||
},
|
||||
},
|
||||
|
|
@ -377,6 +379,87 @@ lastName: bar
|
|||
Error,
|
||||
[]ValidationError{},
|
||||
},
|
||||
{
|
||||
"valid resource duration - go format",
|
||||
[]byte(`
|
||||
kind: name
|
||||
apiVersion: v1
|
||||
interval: 5s
|
||||
`),
|
||||
[]byte(`{
|
||||
"title": "Example Schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"kind": {
|
||||
"type": "string"
|
||||
},
|
||||
"interval": {
|
||||
"type": "string",
|
||||
"format": "duration"
|
||||
}
|
||||
},
|
||||
"required": ["interval"]
|
||||
}`),
|
||||
nil,
|
||||
false,
|
||||
false,
|
||||
Valid,
|
||||
[]ValidationError{},
|
||||
},
|
||||
{
|
||||
"valid resource duration - iso8601 format",
|
||||
[]byte(`
|
||||
kind: name
|
||||
apiVersion: v1
|
||||
interval: PT1H
|
||||
`),
|
||||
[]byte(`{
|
||||
"title": "Example Schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"kind": {
|
||||
"type": "string"
|
||||
},
|
||||
"interval": {
|
||||
"type": "string",
|
||||
"format": "duration"
|
||||
}
|
||||
},
|
||||
"required": ["interval"]
|
||||
}`),
|
||||
nil,
|
||||
false,
|
||||
false,
|
||||
Valid,
|
||||
[]ValidationError{},
|
||||
},
|
||||
{
|
||||
"invalid resource duration",
|
||||
[]byte(`
|
||||
kind: name
|
||||
apiVersion: v1
|
||||
interval: test
|
||||
`),
|
||||
[]byte(`{
|
||||
"title": "Example Schema",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"kind": {
|
||||
"type": "string"
|
||||
},
|
||||
"interval": {
|
||||
"type": "string",
|
||||
"format": "duration"
|
||||
}
|
||||
},
|
||||
"required": ["interval"]
|
||||
}`),
|
||||
nil,
|
||||
false,
|
||||
false,
|
||||
Invalid,
|
||||
[]ValidationError{{Path: "/interval", Msg: "'test' is not valid duration: must start with P"}},
|
||||
},
|
||||
} {
|
||||
val := v{
|
||||
opts: Opts{
|
||||
|
|
@ -385,14 +468,27 @@ lastName: bar
|
|||
IgnoreMissingSchemas: testCase.ignoreMissingSchema,
|
||||
Strict: testCase.strict,
|
||||
},
|
||||
schemaCache: nil,
|
||||
schemaDownload: downloadSchema,
|
||||
regs: []registry.Registry{
|
||||
newMockRegistry(func() (string, []byte, error) {
|
||||
return "", testCase.schemaRegistry1, nil
|
||||
newMockRegistry(func() (string, any, error) {
|
||||
if testCase.schemaRegistry1 == nil {
|
||||
return "", nil, loader.NewNotFoundError(nil)
|
||||
}
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(testCase.schemaRegistry1))
|
||||
if err != nil {
|
||||
return "", s, loader.NewNonJSONResponseError(err)
|
||||
}
|
||||
return "", s, err
|
||||
}),
|
||||
newMockRegistry(func() (string, []byte, error) {
|
||||
return "", testCase.schemaRegistry2, nil
|
||||
newMockRegistry(func() (string, any, error) {
|
||||
if testCase.schemaRegistry2 == nil {
|
||||
return "", nil, loader.NewNotFoundError(nil)
|
||||
}
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(testCase.schemaRegistry2))
|
||||
if err != nil {
|
||||
return "", s, loader.NewNonJSONResponseError(err)
|
||||
}
|
||||
return "", s, err
|
||||
}),
|
||||
},
|
||||
}
|
||||
|
|
@ -447,8 +543,8 @@ age: not a number
|
|||
}`)
|
||||
|
||||
expectedErrors := []ValidationError{
|
||||
{Path: "", Msg: "missing properties: 'lastName'"},
|
||||
{Path: "/age", Msg: "expected integer, but got string"},
|
||||
{Path: "", Msg: "missing property 'lastName'"},
|
||||
{Path: "/age", Msg: "got string, want integer"},
|
||||
}
|
||||
|
||||
val := v{
|
||||
|
|
@ -456,11 +552,14 @@ age: not a number
|
|||
SkipKinds: map[string]struct{}{},
|
||||
RejectKinds: map[string]struct{}{},
|
||||
},
|
||||
schemaCache: nil,
|
||||
schemaDownload: downloadSchema,
|
||||
regs: []registry.Registry{
|
||||
newMockRegistry(func() (string, []byte, error) {
|
||||
return "", schema, nil
|
||||
newMockRegistry(func() (string, any, error) {
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(schema))
|
||||
if err != nil {
|
||||
return "", s, loader.NewNonJSONResponseError(err)
|
||||
}
|
||||
return "", s, err
|
||||
}),
|
||||
},
|
||||
}
|
||||
|
|
@ -505,11 +604,11 @@ firstName: foo
|
|||
SkipKinds: map[string]struct{}{},
|
||||
RejectKinds: map[string]struct{}{},
|
||||
},
|
||||
schemaCache: nil,
|
||||
schemaDownload: downloadSchema,
|
||||
regs: []registry.Registry{
|
||||
newMockRegistry(func() (string, []byte, error) {
|
||||
return "", schema, nil
|
||||
newMockRegistry(func() (string, any, error) {
|
||||
s, err := jsonschema.UnmarshalJSON(bytes.NewReader(schema))
|
||||
return "", s, err
|
||||
}),
|
||||
},
|
||||
}
|
||||
|
|
@ -523,7 +622,7 @@ firstName: foo
|
|||
|
||||
expectedStatuses := []Status{Valid, Invalid}
|
||||
expectedValidationErrors := []ValidationError{
|
||||
{Path: "", Msg: "missing properties: 'lastName'"},
|
||||
{Path: "", Msg: "missing property 'lastName'"},
|
||||
}
|
||||
if !reflect.DeepEqual(expectedStatuses, gotStatuses) {
|
||||
t.Errorf("Expected %+v, got %+v", expectedStatuses, gotStatuses)
|
||||
|
|
|
|||
|
|
@ -1,13 +1,13 @@
|
|||
---
|
||||
title: "Github Action"
|
||||
title: "GitHub Action"
|
||||
date: 2021-07-02T00:00:00Z
|
||||
draft: false
|
||||
tags: ["Kubeconform", "Usage"]
|
||||
weight: 6
|
||||
---
|
||||
|
||||
Kubeconform is publishes Docker Images to Github's new Container Registry, ghcr.io. These images
|
||||
can be used directly in a Github Action, once logged in using a [_Github Token_](https://github.blog/changelog/2021-03-24-packages-container-registry-now-supports-github_token/).
|
||||
Kubeconform is publishes Docker Images to GitHub's new Container Registry, ghcr.io. These images
|
||||
can be used directly in a GitHub Action, once logged in using a [_GitHub Token_](https://github.blog/changelog/2021-03-24-packages-container-registry-now-supports-github_token/).
|
||||
|
||||
{{< prism >}}name: kubeconform
|
||||
on: push
|
||||
|
|
@ -15,7 +15,7 @@ jobs:
|
|||
kubeconform:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: login to Github Packages
|
||||
- name: login to GitHub Packages
|
||||
run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin
|
||||
- uses: actions/checkout@v2
|
||||
- uses: docker://ghcr.io/yannh/kubeconform:master
|
||||
|
|
@ -24,8 +24,8 @@ jobs:
|
|||
args: "-summary -output json kubeconfigs/"
|
||||
{{< /prism >}}
|
||||
|
||||
_Note on pricing_: Kubeconform relies on Github Container Registry which is currently in Beta. During that period,
|
||||
_Note on pricing_: Kubeconform relies on GitHub Container Registry which is currently in Beta. During that period,
|
||||
[bandwidth is free](https://docs.github.com/en/packages/guides/about-github-container-registry). After that period,
|
||||
bandwidth costs might be applicable. Since bandwidth from Github Packages within Github Actions is free, I expect
|
||||
Github Container Registry to also be usable for free within Github Actions in the future. If that were not to be the
|
||||
bandwidth costs might be applicable. Since bandwidth from GitHub Packages within GitHub Actions is free, I expect
|
||||
GitHub Container Registry to also be usable for free within GitHub Actions in the future. If that were not to be the
|
||||
case, I might publish the Docker image to a different platform.
|
||||
|
|
@ -8,7 +8,7 @@
|
|||
<div id=main-container><div id=header>
|
||||
<ul id=navigation>
|
||||
<li><a href=/about>About</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>Github</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>GitHub</a></li>
|
||||
<li><a href=/docs/installation/>Docs</a></li>
|
||||
<li><a href=/>Home</a></li>
|
||||
</ul>
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
<div id=main-container><div id=header>
|
||||
<ul id=navigation>
|
||||
<li><a href=/about>About</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>Github</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>GitHub</a></li>
|
||||
<li><a href=/docs/installation/>Docs</a></li>
|
||||
<li><a href=/>Home</a></li>
|
||||
</ul>
|
||||
|
|
@ -21,7 +21,7 @@
|
|||
<li><a href=http://kubeconform.mandragor.org/docs/usage/>Usage</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/crd-support/>Custom Resources support</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/json-schema-conversion/>OpenAPI to JSON Schema conversion</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>Github Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>GitHub Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/using-as-a-go-module/>Kubeconform as a Go module</a></li>
|
||||
</ul>
|
||||
<div id=main>
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ For example, for Linux on x86_64 architecture:
|
|||
curl -L https://github.com/yannh/kubeconform/releases/latest/download/kubeconform-linux-amd64.tar.gz | tar xvzf - && \ sudo mv kubeconform /usr/local/bin/ MacOs Kubeconform is available to install using Homebrew: $ brew install kubeconform
|
||||
Windows Download the latest release from our release page.</description></item><item><title>Usage</title><link>http://kubeconform.mandragor.org/docs/usage/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage/</guid><description>$ ./bin/kubeconform -h Usage: ./bin/kubeconform [OPTION]... [FILE OR FOLDER]... -cache string cache schemas downloaded via HTTP to this folder -cpu-prof string debug - log CPU profiling to file -exit-on-error immediately stop execution when the first error is encountered -h show help information -ignore-filename-pattern value regular expression specifying paths to ignore (can be specified multiple times) -ignore-missing-schemas skip files with missing schemas instead of failing -insecure-skip-tls-verify disable verification of the server's SSL certificate.</description></item><item><title>Custom Resources support</title><link>http://kubeconform.mandragor.org/docs/crd-support/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/crd-support/</guid><description>When the -schema-location parameter is not used, or set to &ldquo;default&rdquo;, kubeconform will default to downloading schemas from https://github.com/yannh/kubernetes-json-schema. Kubeconform however supports passing one, or multiple, schemas locations - HTTP(s) URLs, or local filesystem paths, in which case it will lookup for schema definitions in each of them, in order, stopping as soon as a matching file is found.
|
||||
If the -schema-location value does not end with &lsquo;.json&rsquo;, Kubeconform will assume filenames / a file structure identical to that of kubernetesjsonschema.</description></item><item><title>OpenAPI to JSON Schema conversion</title><link>http://kubeconform.mandragor.org/docs/json-schema-conversion/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/json-schema-conversion/</guid><description>Kubeconform uses JSON schemas to validate Kubernetes resources. For custom resources, the CustomResourceDefinition first needs to be converted to JSON Schema. A script is provided to convert these CustomResourceDefinitions to JSON schema. Here is an example how to use it:
|
||||
#!/bin/bash $ ./scripts/openapi2jsonschema.py https://raw.githubusercontent.com/aws/amazon-sagemaker-operator-for-k8s/master/config/crd/bases/sagemaker.aws.amazon.com_trainingjobs.yaml JSON schema written to trainingjob_v1.json The FILENAME_FORMAT environment variable can be used to change the output file name (Available variables: kind, group, version) (Default: {kind}_{version}).</description></item><item><title>Github Action</title><link>http://kubeconform.mandragor.org/docs/usage-as-github-action/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage-as-github-action/</guid><description>Kubeconform is publishes Docker Images to Github&rsquo;s new Container Registry, ghcr.io. These images can be used directly in a Github Action, once logged in using a Github Token.
|
||||
name: kubeconform on: push jobs: kubeconform: runs-on: ubuntu-latest steps: - name: login to Github Packages run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin - uses: actions/checkout@v2 - uses: docker://ghcr.io/yannh/kubeconform:master with: entrypoint: '/kubeconform' args: "-summary -output json kubeconfigs/" Note on pricing: Kubeconform relies on Github Container Registry which is currently in Beta.</description></item><item><title>Kubeconform as a Go module</title><link>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</guid><description>Warning: This is a work-in-progress, the interface is not yet considered stable. Feedback is encouraged.
|
||||
#!/bin/bash $ ./scripts/openapi2jsonschema.py https://raw.githubusercontent.com/aws/amazon-sagemaker-operator-for-k8s/master/config/crd/bases/sagemaker.aws.amazon.com_trainingjobs.yaml JSON schema written to trainingjob_v1.json The FILENAME_FORMAT environment variable can be used to change the output file name (Available variables: kind, group, version) (Default: {kind}_{version}).</description></item><item><title>GitHub Action</title><link>http://kubeconform.mandragor.org/docs/usage-as-github-action/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage-as-github-action/</guid><description>Kubeconform is publishes Docker Images to GitHub&rsquo;s new Container Registry, ghcr.io. These images can be used directly in a GitHub Action, once logged in using a GitHub Token.
|
||||
name: kubeconform on: push jobs: kubeconform: runs-on: ubuntu-latest steps: - name: login to GitHub Packages run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin - uses: actions/checkout@v2 - uses: docker://ghcr.io/yannh/kubeconform:master with: entrypoint: '/kubeconform' args: "-summary -output json kubeconfigs/" Note on pricing: Kubeconform relies on GitHub Container Registry which is currently in Beta.</description></item><item><title>Kubeconform as a Go module</title><link>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</guid><description>Warning: This is a work-in-progress, the interface is not yet considered stable. Feedback is encouraged.
|
||||
Kubeconform contains a package that can be used as a library. An example of usage can be found in examples/main.go
|
||||
Additional documentation on pkg.go.dev</description></item></channel></rss>
|
||||
|
|
@ -8,7 +8,7 @@
|
|||
<div id=main-container><div id=header>
|
||||
<ul id=navigation>
|
||||
<li><a href=/about>About</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>Github</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>GitHub</a></li>
|
||||
<li><a href=/docs/installation/>Docs</a></li>
|
||||
<li><a href=/>Home</a></li>
|
||||
</ul>
|
||||
|
|
@ -21,7 +21,7 @@
|
|||
<li><a href=http://kubeconform.mandragor.org/docs/usage/>Usage</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/crd-support/>Custom Resources support</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/json-schema-conversion/>OpenAPI to JSON Schema conversion</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>Github Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>GitHub Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/using-as-a-go-module/>Kubeconform as a Go module</a></li>
|
||||
</ul>
|
||||
<div id=main>
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
<div id=main-container><div id=header>
|
||||
<ul id=navigation>
|
||||
<li><a href=/about>About</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>Github</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>GitHub</a></li>
|
||||
<li><a href=/docs/installation/>Docs</a></li>
|
||||
<li><a href=/>Home</a></li>
|
||||
</ul>
|
||||
|
|
@ -21,13 +21,13 @@
|
|||
<li><a href=http://kubeconform.mandragor.org/docs/usage/>Usage</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/crd-support/>Custom Resources support</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/json-schema-conversion/>OpenAPI to JSON Schema conversion</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>Github Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>GitHub Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/using-as-a-go-module/>Kubeconform as a Go module</a></li>
|
||||
</ul>
|
||||
<div id=main>
|
||||
<div class=navig>
|
||||
<a href=http://kubeconform.mandragor.org/docs/crd-support/ id=prev>< Custom Resources support</a>
|
||||
<a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/ id=next>Github Action ></a>
|
||||
<a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/ id=next>GitHub Action ></a>
|
||||
</div>
|
||||
<div id=content-text>
|
||||
<h1>OpenAPI to JSON Schema conversion</h1>
|
||||
|
|
@ -47,7 +47,7 @@ JSON schema written to trainingjob-sagemaker-v1.json
|
|||
</div>
|
||||
<div class=navig>
|
||||
<a href=http://kubeconform.mandragor.org/docs/crd-support/ id=prev>< Custom Resources support</a>
|
||||
<a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/ id=next>Github Action ></a>
|
||||
<a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/ id=next>GitHub Action ></a>
|
||||
</div>
|
||||
<script defer src=/js/prism.js></script>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -2,13 +2,13 @@
|
|||
<meta charset=utf-8>
|
||||
<meta name=author content="Yann Hamon">
|
||||
<link rel=stylesheet type=text/css href=/css/style.css><link rel=stylesheet type=text/css href=/css/prism.css>
|
||||
<title>Kubeconform - Fast Kubernetes manifests validation! | Github Action</title>
|
||||
<title>Kubeconform - Fast Kubernetes manifests validation! | GitHub Action</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id=main-container><div id=header>
|
||||
<ul id=navigation>
|
||||
<li><a href=/about>About</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>Github</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>GitHub</a></li>
|
||||
<li><a href=/docs/installation/>Docs</a></li>
|
||||
<li><a href=/>Home</a></li>
|
||||
</ul>
|
||||
|
|
@ -21,7 +21,7 @@
|
|||
<li><a href=http://kubeconform.mandragor.org/docs/usage/>Usage</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/crd-support/>Custom Resources support</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/json-schema-conversion/>OpenAPI to JSON Schema conversion</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>Github Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>GitHub Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/using-as-a-go-module/>Kubeconform as a Go module</a></li>
|
||||
</ul>
|
||||
<div id=main>
|
||||
|
|
@ -30,16 +30,16 @@
|
|||
<a href=http://kubeconform.mandragor.org/docs/using-as-a-go-module/ id=next>Kubeconform as a Go module ></a>
|
||||
</div>
|
||||
<div id=content-text>
|
||||
<h1>Github Action</h1>
|
||||
<p>Kubeconform is publishes Docker Images to Github’s new Container Registry, ghcr.io. These images
|
||||
can be used directly in a Github Action, once logged in using a <a href=https://github.blog/changelog/2021-03-24-packages-container-registry-now-supports-github_token/><em>Github Token</em></a>.</p>
|
||||
<h1>GitHub Action</h1>
|
||||
<p>Kubeconform is publishes Docker Images to GitHub’s new Container Registry, ghcr.io. These images
|
||||
can be used directly in a GitHub Action, once logged in using a <a href=https://github.blog/changelog/2021-03-24-packages-container-registry-now-supports-github_token/><em>GitHub Token</em></a>.</p>
|
||||
<pre><code class=language-bash>name: kubeconform
|
||||
on: push
|
||||
jobs:
|
||||
kubeconform:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: login to Github Packages
|
||||
- name: login to GitHub Packages
|
||||
run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin
|
||||
- uses: actions/checkout@v2
|
||||
- uses: docker://ghcr.io/yannh/kubeconform:master
|
||||
|
|
@ -47,10 +47,10 @@ jobs:
|
|||
entrypoint: '/kubeconform'
|
||||
args: "-summary -output json kubeconfigs/"
|
||||
</code></pre>
|
||||
<p><em>Note on pricing</em>: Kubeconform relies on Github Container Registry which is currently in Beta. During that period,
|
||||
<p><em>Note on pricing</em>: Kubeconform relies on GitHub Container Registry which is currently in Beta. During that period,
|
||||
<a href=https://docs.github.com/en/packages/guides/about-github-container-registry>bandwidth is free</a>. After that period,
|
||||
bandwidth costs might be applicable. Since bandwidth from Github Packages within Github Actions is free, I expect
|
||||
Github Container Registry to also be usable for free within Github Actions in the future. If that were not to be the
|
||||
bandwidth costs might be applicable. Since bandwidth from GitHub Packages within GitHub Actions is free, I expect
|
||||
GitHub Container Registry to also be usable for free within GitHub Actions in the future. If that were not to be the
|
||||
case, I might publish the Docker image to a different platform.</p>
|
||||
</div>
|
||||
<div class=navig>
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
<div id=main-container><div id=header>
|
||||
<ul id=navigation>
|
||||
<li><a href=/about>About</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>Github</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>GitHub</a></li>
|
||||
<li><a href=/docs/installation/>Docs</a></li>
|
||||
<li><a href=/>Home</a></li>
|
||||
</ul>
|
||||
|
|
@ -21,7 +21,7 @@
|
|||
<li><a href=http://kubeconform.mandragor.org/docs/usage/>Usage</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/crd-support/>Custom Resources support</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/json-schema-conversion/>OpenAPI to JSON Schema conversion</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>Github Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>GitHub Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/using-as-a-go-module/>Kubeconform as a Go module</a></li>
|
||||
</ul>
|
||||
<div id=main>
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@
|
|||
<div id=main-container><div id=header>
|
||||
<ul id=navigation>
|
||||
<li><a href=/about>About</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>Github</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>GitHub</a></li>
|
||||
<li><a href=/docs/installation/>Docs</a></li>
|
||||
<li><a href=/>Home</a></li>
|
||||
</ul>
|
||||
|
|
@ -21,12 +21,12 @@
|
|||
<li><a href=http://kubeconform.mandragor.org/docs/usage/>Usage</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/crd-support/>Custom Resources support</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/json-schema-conversion/>OpenAPI to JSON Schema conversion</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>Github Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/>GitHub Action</a></li>
|
||||
<li><a href=http://kubeconform.mandragor.org/docs/using-as-a-go-module/>Kubeconform as a Go module</a></li>
|
||||
</ul>
|
||||
<div id=main>
|
||||
<div class=navig>
|
||||
<a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/ id=prev>< Github Action</a>
|
||||
<a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/ id=prev>< GitHub Action</a>
|
||||
<a href=# id=prev></a>
|
||||
</div>
|
||||
<div id=content-text>
|
||||
|
|
@ -37,7 +37,7 @@ An example of usage can be found in <a href=https://github.com/yannh/kubeconform
|
|||
<p>Additional documentation on <a href=https://pkg.go.dev/github.com/yannh/kubeconform/pkg/validator>pkg.go.dev</a></p>
|
||||
</div>
|
||||
<div class=navig>
|
||||
<a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/ id=prev>< Github Action</a>
|
||||
<a href=http://kubeconform.mandragor.org/docs/usage-as-github-action/ id=prev>< GitHub Action</a>
|
||||
<a href=# id=prev></a>
|
||||
</div>
|
||||
<script defer src=/js/prism.js></script>
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@
|
|||
<div id=main-container><div id=header>
|
||||
<ul id=navigation>
|
||||
<li><a href=/about>About</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>Github</a></li>
|
||||
<li><a href=https://github.com/yannh/kubeconform/>GitHub</a></li>
|
||||
<li><a href=/docs/installation/>Docs</a></li>
|
||||
<li><a href=/>Home</a></li>
|
||||
</ul>
|
||||
|
|
|
|||
|
|
@ -5,8 +5,8 @@ For example, for Linux on x86_64 architecture:
|
|||
curl -L https://github.com/yannh/kubeconform/releases/latest/download/kubeconform-linux-amd64.tar.gz | tar xvzf - && \ sudo mv kubeconform /usr/local/bin/ MacOs Kubeconform is available to install using Homebrew: $ brew install kubeconform
|
||||
Windows Download the latest release from our release page.</description></item><item><title>Usage</title><link>http://kubeconform.mandragor.org/docs/usage/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage/</guid><description>$ ./bin/kubeconform -h Usage: ./bin/kubeconform [OPTION]... [FILE OR FOLDER]... -cache string cache schemas downloaded via HTTP to this folder -cpu-prof string debug - log CPU profiling to file -exit-on-error immediately stop execution when the first error is encountered -h show help information -ignore-filename-pattern value regular expression specifying paths to ignore (can be specified multiple times) -ignore-missing-schemas skip files with missing schemas instead of failing -insecure-skip-tls-verify disable verification of the server's SSL certificate.</description></item><item><title>Custom Resources support</title><link>http://kubeconform.mandragor.org/docs/crd-support/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/crd-support/</guid><description>When the -schema-location parameter is not used, or set to &ldquo;default&rdquo;, kubeconform will default to downloading schemas from https://github.com/yannh/kubernetes-json-schema. Kubeconform however supports passing one, or multiple, schemas locations - HTTP(s) URLs, or local filesystem paths, in which case it will lookup for schema definitions in each of them, in order, stopping as soon as a matching file is found.
|
||||
If the -schema-location value does not end with &lsquo;.json&rsquo;, Kubeconform will assume filenames / a file structure identical to that of kubernetesjsonschema.</description></item><item><title>OpenAPI to JSON Schema conversion</title><link>http://kubeconform.mandragor.org/docs/json-schema-conversion/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/json-schema-conversion/</guid><description>Kubeconform uses JSON schemas to validate Kubernetes resources. For custom resources, the CustomResourceDefinition first needs to be converted to JSON Schema. A script is provided to convert these CustomResourceDefinitions to JSON schema. Here is an example how to use it:
|
||||
#!/bin/bash $ ./scripts/openapi2jsonschema.py https://raw.githubusercontent.com/aws/amazon-sagemaker-operator-for-k8s/master/config/crd/bases/sagemaker.aws.amazon.com_trainingjobs.yaml JSON schema written to trainingjob_v1.json The FILENAME_FORMAT environment variable can be used to change the output file name (Available variables: kind, group, version) (Default: {kind}_{version}).</description></item><item><title>Github Action</title><link>http://kubeconform.mandragor.org/docs/usage-as-github-action/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage-as-github-action/</guid><description>Kubeconform is publishes Docker Images to Github&rsquo;s new Container Registry, ghcr.io. These images can be used directly in a Github Action, once logged in using a Github Token.
|
||||
name: kubeconform on: push jobs: kubeconform: runs-on: ubuntu-latest steps: - name: login to Github Packages run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin - uses: actions/checkout@v2 - uses: docker://ghcr.io/yannh/kubeconform:master with: entrypoint: '/kubeconform' args: "-summary -output json kubeconfigs/" Note on pricing: Kubeconform relies on Github Container Registry which is currently in Beta.</description></item><item><title>Kubeconform as a Go module</title><link>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</guid><description>Warning: This is a work-in-progress, the interface is not yet considered stable. Feedback is encouraged.
|
||||
#!/bin/bash $ ./scripts/openapi2jsonschema.py https://raw.githubusercontent.com/aws/amazon-sagemaker-operator-for-k8s/master/config/crd/bases/sagemaker.aws.amazon.com_trainingjobs.yaml JSON schema written to trainingjob_v1.json The FILENAME_FORMAT environment variable can be used to change the output file name (Available variables: kind, group, version) (Default: {kind}_{version}).</description></item><item><title>GitHub Action</title><link>http://kubeconform.mandragor.org/docs/usage-as-github-action/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage-as-github-action/</guid><description>Kubeconform is publishes Docker Images to GitHub&rsquo;s new Container Registry, ghcr.io. These images can be used directly in a GitHub Action, once logged in using a GitHub Token.
|
||||
name: kubeconform on: push jobs: kubeconform: runs-on: ubuntu-latest steps: - name: login to GitHub Packages run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin - uses: actions/checkout@v2 - uses: docker://ghcr.io/yannh/kubeconform:master with: entrypoint: '/kubeconform' args: "-summary -output json kubeconfigs/" Note on pricing: Kubeconform relies on GitHub Container Registry which is currently in Beta.</description></item><item><title>Kubeconform as a Go module</title><link>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</guid><description>Warning: This is a work-in-progress, the interface is not yet considered stable. Feedback is encouraged.
|
||||
Kubeconform contains a package that can be used as a library. An example of usage can be found in examples/main.go
|
||||
Additional documentation on pkg.go.dev</description></item><item><title>About</title><link>http://kubeconform.mandragor.org/about/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/about/</guid><description>Kubeconform is a Kubernetes manifests validation tool. Build it into your CI to validate your Kubernetes configuration!
|
||||
It is inspired by, contains code from and is designed to stay close to Kubeval, but with the following improvements:
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@
|
|||
</div>
|
||||
|
||||
</div><div id="footer">
|
||||
<h3>Github</h3>
|
||||
<h3>GitHub</h3>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
|
|
|||
|
|
@ -5,8 +5,8 @@ For example, for Linux on x86_64 architecture:
|
|||
curl -L https://github.com/yannh/kubeconform/releases/latest/download/kubeconform-linux-amd64.tar.gz | tar xvzf - && \ sudo mv kubeconform /usr/local/bin/ MacOs Kubeconform is available to install using Homebrew: $ brew install kubeconform
|
||||
Windows Download the latest release from our release page.</description></item><item><title>Usage</title><link>http://kubeconform.mandragor.org/docs/usage/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage/</guid><description>$ ./bin/kubeconform -h Usage: ./bin/kubeconform [OPTION]... [FILE OR FOLDER]... -cache string cache schemas downloaded via HTTP to this folder -cpu-prof string debug - log CPU profiling to file -exit-on-error immediately stop execution when the first error is encountered -h show help information -ignore-filename-pattern value regular expression specifying paths to ignore (can be specified multiple times) -ignore-missing-schemas skip files with missing schemas instead of failing -insecure-skip-tls-verify disable verification of the server's SSL certificate.</description></item><item><title>Custom Resources support</title><link>http://kubeconform.mandragor.org/docs/crd-support/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/crd-support/</guid><description>When the -schema-location parameter is not used, or set to &ldquo;default&rdquo;, kubeconform will default to downloading schemas from https://github.com/yannh/kubernetes-json-schema. Kubeconform however supports passing one, or multiple, schemas locations - HTTP(s) URLs, or local filesystem paths, in which case it will lookup for schema definitions in each of them, in order, stopping as soon as a matching file is found.
|
||||
If the -schema-location value does not end with &lsquo;.json&rsquo;, Kubeconform will assume filenames / a file structure identical to that of kubernetesjsonschema.</description></item><item><title>OpenAPI to JSON Schema conversion</title><link>http://kubeconform.mandragor.org/docs/json-schema-conversion/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/json-schema-conversion/</guid><description>Kubeconform uses JSON schemas to validate Kubernetes resources. For custom resources, the CustomResourceDefinition first needs to be converted to JSON Schema. A script is provided to convert these CustomResourceDefinitions to JSON schema. Here is an example how to use it:
|
||||
#!/bin/bash $ ./scripts/openapi2jsonschema.py https://raw.githubusercontent.com/aws/amazon-sagemaker-operator-for-k8s/master/config/crd/bases/sagemaker.aws.amazon.com_trainingjobs.yaml JSON schema written to trainingjob_v1.json The FILENAME_FORMAT environment variable can be used to change the output file name (Available variables: kind, group, version) (Default: {kind}_{version}).</description></item><item><title>Github Action</title><link>http://kubeconform.mandragor.org/docs/usage-as-github-action/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage-as-github-action/</guid><description>Kubeconform is publishes Docker Images to Github&rsquo;s new Container Registry, ghcr.io. These images can be used directly in a Github Action, once logged in using a Github Token.
|
||||
name: kubeconform on: push jobs: kubeconform: runs-on: ubuntu-latest steps: - name: login to Github Packages run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin - uses: actions/checkout@v2 - uses: docker://ghcr.io/yannh/kubeconform:master with: entrypoint: '/kubeconform' args: "-summary -output json kubeconfigs/" Note on pricing: Kubeconform relies on Github Container Registry which is currently in Beta.</description></item><item><title>Kubeconform as a Go module</title><link>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</guid><description>Warning: This is a work-in-progress, the interface is not yet considered stable. Feedback is encouraged.
|
||||
#!/bin/bash $ ./scripts/openapi2jsonschema.py https://raw.githubusercontent.com/aws/amazon-sagemaker-operator-for-k8s/master/config/crd/bases/sagemaker.aws.amazon.com_trainingjobs.yaml JSON schema written to trainingjob_v1.json The FILENAME_FORMAT environment variable can be used to change the output file name (Available variables: kind, group, version) (Default: {kind}_{version}).</description></item><item><title>GitHub Action</title><link>http://kubeconform.mandragor.org/docs/usage-as-github-action/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage-as-github-action/</guid><description>Kubeconform is publishes Docker Images to GitHub&rsquo;s new Container Registry, ghcr.io. These images can be used directly in a GitHub Action, once logged in using a GitHub Token.
|
||||
name: kubeconform on: push jobs: kubeconform: runs-on: ubuntu-latest steps: - name: login to GitHub Packages run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin - uses: actions/checkout@v2 - uses: docker://ghcr.io/yannh/kubeconform:master with: entrypoint: '/kubeconform' args: "-summary -output json kubeconfigs/" Note on pricing: Kubeconform relies on GitHub Container Registry which is currently in Beta.</description></item><item><title>Kubeconform as a Go module</title><link>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</guid><description>Warning: This is a work-in-progress, the interface is not yet considered stable. Feedback is encouraged.
|
||||
Kubeconform contains a package that can be used as a library. An example of usage can be found in examples/main.go
|
||||
Additional documentation on pkg.go.dev</description></item><item><title>About</title><link>http://kubeconform.mandragor.org/about/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/about/</guid><description>Kubeconform is a Kubernetes manifests validation tool. Build it into your CI to validate your Kubernetes configuration!
|
||||
It is inspired by, contains code from and is designed to stay close to Kubeval, but with the following improvements:
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
<?xml version="1.0" encoding="utf-8" standalone="yes"?><rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom"><channel><title>Usage on Kubeconform - Fast Kubernetes manifests validation!</title><link>http://kubeconform.mandragor.org/tags/usage/</link><description>Recent content in Usage on Kubeconform - Fast Kubernetes manifests validation!</description><generator>Hugo -- gohugo.io</generator><language>en-us</language><lastBuildDate>Fri, 02 Jul 2021 00:00:00 +0000</lastBuildDate><atom:link href="http://kubeconform.mandragor.org/tags/usage/index.xml" rel="self" type="application/rss+xml"/><item><title>Usage</title><link>http://kubeconform.mandragor.org/docs/usage/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage/</guid><description>$ ./bin/kubeconform -h Usage: ./bin/kubeconform [OPTION]... [FILE OR FOLDER]... -cache string cache schemas downloaded via HTTP to this folder -cpu-prof string debug - log CPU profiling to file -exit-on-error immediately stop execution when the first error is encountered -h show help information -ignore-filename-pattern value regular expression specifying paths to ignore (can be specified multiple times) -ignore-missing-schemas skip files with missing schemas instead of failing -insecure-skip-tls-verify disable verification of the server's SSL certificate.</description></item><item><title>Custom Resources support</title><link>http://kubeconform.mandragor.org/docs/crd-support/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/crd-support/</guid><description>When the -schema-location parameter is not used, or set to &ldquo;default&rdquo;, kubeconform will default to downloading schemas from https://github.com/yannh/kubernetes-json-schema. 
Kubeconform however supports passing one, or multiple, schemas locations - HTTP(s) URLs, or local filesystem paths, in which case it will lookup for schema definitions in each of them, in order, stopping as soon as a matching file is found.
|
||||
If the -schema-location value does not end with &lsquo;.json&rsquo;, Kubeconform will assume filenames / a file structure identical to that of kubernetesjsonschema.</description></item><item><title>OpenAPI to JSON Schema conversion</title><link>http://kubeconform.mandragor.org/docs/json-schema-conversion/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/json-schema-conversion/</guid><description>Kubeconform uses JSON schemas to validate Kubernetes resources. For custom resources, the CustomResourceDefinition first needs to be converted to JSON Schema. A script is provided to convert these CustomResourceDefinitions to JSON schema. Here is an example how to use it:
|
||||
#!/bin/bash $ ./scripts/openapi2jsonschema.py https://raw.githubusercontent.com/aws/amazon-sagemaker-operator-for-k8s/master/config/crd/bases/sagemaker.aws.amazon.com_trainingjobs.yaml JSON schema written to trainingjob_v1.json The FILENAME_FORMAT environment variable can be used to change the output file name (Available variables: kind, group, version) (Default: {kind}_{version}).</description></item><item><title>Github Action</title><link>http://kubeconform.mandragor.org/docs/usage-as-github-action/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage-as-github-action/</guid><description>Kubeconform is publishes Docker Images to Github&rsquo;s new Container Registry, ghcr.io. These images can be used directly in a Github Action, once logged in using a Github Token.
|
||||
name: kubeconform on: push jobs: kubeconform: runs-on: ubuntu-latest steps: - name: login to Github Packages run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin - uses: actions/checkout@v2 - uses: docker://ghcr.io/yannh/kubeconform:master with: entrypoint: '/kubeconform' args: "-summary -output json kubeconfigs/" Note on pricing: Kubeconform relies on Github Container Registry which is currently in Beta.</description></item><item><title>Kubeconform as a Go module</title><link>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</guid><description>Warning: This is a work-in-progress, the interface is not yet considered stable. Feedback is encouraged.
|
||||
#!/bin/bash $ ./scripts/openapi2jsonschema.py https://raw.githubusercontent.com/aws/amazon-sagemaker-operator-for-k8s/master/config/crd/bases/sagemaker.aws.amazon.com_trainingjobs.yaml JSON schema written to trainingjob_v1.json The FILENAME_FORMAT environment variable can be used to change the output file name (Available variables: kind, group, version) (Default: {kind}_{version}).</description></item><item><title>GitHub Action</title><link>http://kubeconform.mandragor.org/docs/usage-as-github-action/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/usage-as-github-action/</guid><description>Kubeconform is publishes Docker Images to GitHub&rsquo;s new Container Registry, ghcr.io. These images can be used directly in a GitHub Action, once logged in using a GitHub Token.
|
||||
name: kubeconform on: push jobs: kubeconform: runs-on: ubuntu-latest steps: - name: login to GitHub Packages run: echo "${{ github.token }}" | docker login https://ghcr.io -u ${GITHUB_ACTOR} --password-stdin - uses: actions/checkout@v2 - uses: docker://ghcr.io/yannh/kubeconform:master with: entrypoint: '/kubeconform' args: "-summary -output json kubeconfigs/" Note on pricing: Kubeconform relies on GitHub Container Registry which is currently in Beta.</description></item><item><title>Kubeconform as a Go module</title><link>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</link><pubDate>Fri, 02 Jul 2021 00:00:00 +0000</pubDate><guid>http://kubeconform.mandragor.org/docs/using-as-a-go-module/</guid><description>Warning: This is a work-in-progress, the interface is not yet considered stable. Feedback is encouraged.
|
||||
Kubeconform contains a package that can be used as a library. An example of usage can be found in examples/main.go
|
||||
Additional documentation on pkg.go.dev</description></item></channel></rss>
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
<div id="header">
|
||||
<ul id="navigation">
|
||||
<li><a href="/about">About</a></li>
|
||||
<li><a href="https://github.com/yannh/kubeconform/">Github</a></li>
|
||||
<li><a href="https://github.com/yannh/kubeconform/">GitHub</a></li>
|
||||
<li><a href="/docs/installation/">Docs</a></li>
|
||||
<li><a href="/">Home</a></li>
|
||||
</ul>
|
||||
|
|
|
|||
4
vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
generated
vendored
4
vendor/github.com/santhosh-tekuri/jsonschema/v5/.gitignore
generated
vendored
|
|
@ -1,4 +0,0 @@
|
|||
.vscode
|
||||
.idea
|
||||
*.swp
|
||||
cmd/jv/jv
|
||||
220
vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
generated
vendored
220
vendor/github.com/santhosh-tekuri/jsonschema/v5/README.md
generated
vendored
|
|
@ -1,220 +0,0 @@
|
|||
# jsonschema v5.3.1
|
||||
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
[](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
[](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
|
||||
[](https://codecov.io/gh/santhosh-tekuri/jsonschema)
|
||||
|
||||
Package jsonschema provides json-schema compilation and validation.
|
||||
|
||||
[Benchmarks](https://dev.to/vearutop/benchmarking-correctness-and-performance-of-go-json-schema-validators-3247)
|
||||
|
||||
### Features:
|
||||
- implements
|
||||
[draft 2020-12](https://json-schema.org/specification-links.html#2020-12),
|
||||
[draft 2019-09](https://json-schema.org/specification-links.html#draft-2019-09-formerly-known-as-draft-8),
|
||||
[draft-7](https://json-schema.org/specification-links.html#draft-7),
|
||||
[draft-6](https://json-schema.org/specification-links.html#draft-6),
|
||||
[draft-4](https://json-schema.org/specification-links.html#draft-4)
|
||||
- fully compliant with [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite), (excluding some optional)
|
||||
- list of optional tests that are excluded can be found in schema_test.go(variable [skipTests](https://github.com/santhosh-tekuri/jsonschema/blob/master/schema_test.go#L24))
|
||||
- validates schemas against meta-schema
|
||||
- full support of remote references
|
||||
- support of recursive references between schemas
|
||||
- detects infinite loop in schemas
|
||||
- thread safe validation
|
||||
- rich, intuitive hierarchial error messages with json-pointers to exact location
|
||||
- supports output formats flag, basic and detailed
|
||||
- supports enabling format and content Assertions in draft2019-09 or above
|
||||
- change `Compiler.AssertFormat`, `Compiler.AssertContent` to `true`
|
||||
- compiled schema can be introspected. easier to develop tools like generating go structs given schema
|
||||
- supports user-defined keywords via [extensions](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-Extension)
|
||||
- implements following formats (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedFormat))
|
||||
- date-time, date, time, duration, period (supports leap-second)
|
||||
- uuid, hostname, email
|
||||
- ip-address, ipv4, ipv6
|
||||
- uri, uriref, uri-template(limited validation)
|
||||
- json-pointer, relative-json-pointer
|
||||
- regex, format
|
||||
- implements following contentEncoding (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
|
||||
- base64
|
||||
- implements following contentMediaType (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedContent))
|
||||
- application/json
|
||||
- can load from files/http/https/[string](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-FromString)/[]byte/io.Reader (supports [user-defined](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5/#example-package-UserDefinedLoader))
|
||||
|
||||
|
||||
see examples in [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v5)
|
||||
|
||||
The schema is compiled against the version specified in `$schema` property.
|
||||
If "$schema" property is missing, it uses latest draft which currently implemented
|
||||
by this library.
|
||||
|
||||
You can force to use specific version, when `$schema` is missing, as follows:
|
||||
|
||||
```go
|
||||
compiler := jsonschema.NewCompiler()
|
||||
compiler.Draft = jsonschema.Draft4
|
||||
```
|
||||
|
||||
This package supports loading json-schema from filePath and fileURL.
|
||||
|
||||
To load json-schema from HTTPURL, add following import:
|
||||
|
||||
```go
|
||||
import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
```
|
||||
|
||||
## Rich Errors
|
||||
|
||||
The ValidationError returned by Validate method contains detailed context to understand why and where the error is.
|
||||
|
||||
schema.json:
|
||||
```json
|
||||
{
|
||||
"$ref": "t.json#/definitions/employee"
|
||||
}
|
||||
```
|
||||
|
||||
t.json:
|
||||
```json
|
||||
{
|
||||
"definitions": {
|
||||
"employee": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
doc.json:
|
||||
```json
|
||||
1
|
||||
```
|
||||
|
||||
assuming `err` is the ValidationError returned when `doc.json` validated with `schema.json`,
|
||||
```go
|
||||
fmt.Printf("%#v\n", err) // using %#v prints errors hierarchy
|
||||
```
|
||||
Prints:
|
||||
```
|
||||
[I#] [S#] doesn't validate with file:///Users/santhosh/jsonschema/schema.json#
|
||||
[I#] [S#/$ref] doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'
|
||||
[I#] [S#/definitions/employee/type] expected string, but got number
|
||||
```
|
||||
|
||||
Here `I` stands for instance document and `S` stands for schema document.
|
||||
The json-fragments that caused error in instance and schema documents are represented using json-pointer notation.
|
||||
Nested causes are printed with indent.
|
||||
|
||||
To output `err` in `flag` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.FlagOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false
|
||||
}
|
||||
```
|
||||
To output `err` in `basic` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.BasicOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false,
|
||||
"errors": [
|
||||
{
|
||||
"keywordLocation": "",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
|
||||
"instanceLocation": "",
|
||||
"error": "doesn't validate with file:///Users/santhosh/jsonschema/schema.json#"
|
||||
},
|
||||
{
|
||||
"keywordLocation": "/$ref",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
|
||||
"instanceLocation": "",
|
||||
"error": "doesn't validate with 'file:///Users/santhosh/jsonschema/t.json#/definitions/employee'"
|
||||
},
|
||||
{
|
||||
"keywordLocation": "/$ref/type",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
|
||||
"instanceLocation": "",
|
||||
"error": "expected string, but got number"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
To output `err` in `detailed` output format:
|
||||
```go
|
||||
b, _ := json.MarshalIndent(err.DetailedOutput(), "", " ")
|
||||
fmt.Println(string(b))
|
||||
```
|
||||
Prints:
|
||||
```json
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#",
|
||||
"instanceLocation": "",
|
||||
"errors": [
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "/$ref",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/schema.json#/$ref",
|
||||
"instanceLocation": "",
|
||||
"errors": [
|
||||
{
|
||||
"valid": false,
|
||||
"keywordLocation": "/$ref/type",
|
||||
"absoluteKeywordLocation": "file:///Users/santhosh/jsonschema/t.json#/definitions/employee/type",
|
||||
"instanceLocation": "",
|
||||
"error": "expected string, but got number"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## CLI
|
||||
|
||||
to install `go install github.com/santhosh-tekuri/jsonschema/cmd/jv@latest`
|
||||
|
||||
```bash
|
||||
jv [-draft INT] [-output FORMAT] [-assertformat] [-assertcontent] <json-schema> [<json-or-yaml-doc>]...
|
||||
-assertcontent
|
||||
enable content assertions with draft >= 2019
|
||||
-assertformat
|
||||
enable format assertions with draft >= 2019
|
||||
-draft int
|
||||
draft used when '$schema' attribute is missing. valid values 4, 5, 7, 2019, 2020 (default 2020)
|
||||
-output string
|
||||
output format. valid values flag, basic, detailed
|
||||
```
|
||||
|
||||
if no `<json-or-yaml-doc>` arguments are passed, it simply validates the `<json-schema>`.
|
||||
if `$schema` attribute is missing in schema, it uses latest version. this can be overridden by passing `-draft` flag
|
||||
|
||||
exit-code is 1, if there are any validation errors
|
||||
|
||||
`jv` can also validate yaml files. It also accepts schema from yaml files.
|
||||
|
||||
## Validating YAML Documents
|
||||
|
||||
since yaml supports non-string keys, such yaml documents are rendered as invalid json documents.
|
||||
|
||||
most yaml parser use `map[interface{}]interface{}` for object,
|
||||
whereas json parser uses `map[string]interface{}`.
|
||||
|
||||
so we need to manually convert them to `map[string]interface{}`.
|
||||
below code shows such conversion by `toStringKeys` function.
|
||||
|
||||
https://play.golang.org/p/Hhax3MrtD8r
|
||||
|
||||
NOTE: if you are using `gopkg.in/yaml.v3`, then you do not need such conversion. since this library
|
||||
returns `map[string]interface{}` if all keys are strings.
|
||||
812
vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
generated
vendored
812
vendor/github.com/santhosh-tekuri/jsonschema/v5/compiler.go
generated
vendored
|
|
@ -1,812 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/big"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Compiler represents a json-schema compiler.
|
||||
type Compiler struct {
|
||||
// Draft represents the draft used when '$schema' attribute is missing.
|
||||
//
|
||||
// This defaults to latest supported draft (currently 2020-12).
|
||||
Draft *Draft
|
||||
resources map[string]*resource
|
||||
|
||||
// Extensions is used to register extensions.
|
||||
extensions map[string]extension
|
||||
|
||||
// ExtractAnnotations tells whether schema annotations has to be extracted
|
||||
// in compiled Schema or not.
|
||||
ExtractAnnotations bool
|
||||
|
||||
// LoadURL loads the document at given absolute URL.
|
||||
//
|
||||
// If nil, package global LoadURL is used.
|
||||
LoadURL func(s string) (io.ReadCloser, error)
|
||||
|
||||
// Formats can be registered by adding to this map. Key is format name,
|
||||
// value is function that knows how to validate that format.
|
||||
Formats map[string]func(interface{}) bool
|
||||
|
||||
// AssertFormat for specifications >= draft2019-09.
|
||||
AssertFormat bool
|
||||
|
||||
// Decoders can be registered by adding to this map. Key is encoding name,
|
||||
// value is function that knows how to decode string in that format.
|
||||
Decoders map[string]func(string) ([]byte, error)
|
||||
|
||||
// MediaTypes can be registered by adding to this map. Key is mediaType name,
|
||||
// value is function that knows how to validate that mediaType.
|
||||
MediaTypes map[string]func([]byte) error
|
||||
|
||||
// AssertContent for specifications >= draft2019-09.
|
||||
AssertContent bool
|
||||
}
|
||||
|
||||
// Compile parses json-schema at given url returns, if successful,
|
||||
// a Schema object that can be used to match against json.
|
||||
//
|
||||
// Returned error can be *SchemaError
|
||||
func Compile(url string) (*Schema, error) {
|
||||
return NewCompiler().Compile(url)
|
||||
}
|
||||
|
||||
// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
// It simplifies safe initialization of global variables holding compiled Schemas.
func MustCompile(url string) *Schema {
	return NewCompiler().MustCompile(url)
}
|
||||
|
||||
// CompileString parses and compiles the given schema with given base url.
|
||||
func CompileString(url, schema string) (*Schema, error) {
|
||||
c := NewCompiler()
|
||||
if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.Compile(url)
|
||||
}
|
||||
|
||||
// MustCompileString is like CompileString but panics on error.
// It simplifies safe initialization of global variables holding compiled Schema.
func MustCompileString(url, schema string) *Schema {
	c := NewCompiler()
	if err := c.AddResource(url, strings.NewReader(schema)); err != nil {
		panic(err)
	}
	return c.MustCompile(url)
}
|
||||
|
||||
// NewCompiler returns a json-schema Compiler object.
//
// If '$schema' attribute is missing, the latest supported draft is used
// (the Compiler is initialized with Draft set to `latest`). To change this
// behavior change Compiler.Draft value.
func NewCompiler() *Compiler {
	return &Compiler{
		Draft:      latest,
		resources:  make(map[string]*resource),
		Formats:    make(map[string]func(interface{}) bool),
		Decoders:   make(map[string]func(string) ([]byte, error)),
		MediaTypes: make(map[string]func([]byte) error),
		extensions: make(map[string]extension),
	}
}
|
||||
|
||||
// AddResource adds in-memory resource to the compiler.
//
// The resource is stored under its normalized URL (res.url) and will be
// used instead of loading the URL when referenced.
//
// Note that url must not have fragment.
func (c *Compiler) AddResource(url string, r io.Reader) error {
	res, err := newResource(url, r)
	if err != nil {
		return err
	}
	c.resources[res.url] = res
	return nil
}
|
||||
|
||||
// MustCompile is like Compile but panics if the url cannot be compiled to *Schema.
// It simplifies safe initialization of global variables holding compiled Schemas.
func (c *Compiler) MustCompile(url string) *Schema {
	s, err := c.Compile(url)
	if err != nil {
		// %#v gives the detailed GoString form of the compilation error.
		panic(fmt.Sprintf("jsonschema: %#v", err))
	}
	return s
}
|
||||
|
||||
// Compile parses json-schema at given url returns, if successful,
|
||||
// a Schema object that can be used to match against json.
|
||||
//
|
||||
// error returned will be of type *SchemaError
|
||||
func (c *Compiler) Compile(url string) (*Schema, error) {
|
||||
// make url absolute
|
||||
u, err := toAbs(url)
|
||||
if err != nil {
|
||||
return nil, &SchemaError{url, err}
|
||||
}
|
||||
url = u
|
||||
|
||||
sch, err := c.compileURL(url, nil, "#")
|
||||
if err != nil {
|
||||
err = &SchemaError{url, err}
|
||||
}
|
||||
return sch, err
|
||||
}
|
||||
|
||||
// findResource returns the resource for url, loading and initializing it
// on first use. Initialization resolves the resource's draft (from its
// "$schema" attribute, falling back to c.Draft), resolves its "$id", and
// indexes its subschemas. A resource whose draft is already set is
// returned as-is, making repeated calls cheap.
func (c *Compiler) findResource(url string) (*resource, error) {
	if _, ok := c.resources[url]; !ok {
		// load resource: built-in vocabulary schemas are served from memory,
		// everything else goes through c.LoadURL (or the package-global LoadURL).
		var rdr io.Reader
		if sch, ok := vocabSchemas[url]; ok {
			rdr = strings.NewReader(sch)
		} else {
			loadURL := LoadURL
			if c.LoadURL != nil {
				loadURL = c.LoadURL
			}
			r, err := loadURL(url)
			if err != nil {
				return nil, err
			}
			defer r.Close()
			rdr = r
		}
		if err := c.AddResource(url, rdr); err != nil {
			return nil, err
		}
	}

	r := c.resources[url]
	if r.draft != nil {
		// already initialized
		return r, nil
	}

	// set draft: default to the compiler's draft, then let an explicit
	// "$schema" attribute override it.
	r.draft = c.Draft
	if m, ok := r.doc.(map[string]interface{}); ok {
		if sch, ok := m["$schema"]; ok {
			sch, ok := sch.(string)
			if !ok {
				return nil, fmt.Errorf("jsonschema: invalid $schema in %s", url)
			}
			if !isURI(sch) {
				return nil, fmt.Errorf("jsonschema: $schema must be uri in %s", url)
			}
			r.draft = findDraft(sch)
			if r.draft == nil {
				// not a known draft URL: treat it as a custom meta-schema and
				// inherit the draft from that meta-schema's resource.
				sch, _ := split(sch)
				if sch == url {
					// a resource cannot be its own (unknown) meta-schema
					return nil, fmt.Errorf("jsonschema: unsupported draft in %s", url)
				}
				mr, err := c.findResource(sch)
				if err != nil {
					return nil, err
				}
				r.draft = mr.draft
			}
		}
	}

	// an "$id" at the root rebases the resource's canonical URL
	id, err := r.draft.resolveID(r.url, r.doc)
	if err != nil {
		return nil, err
	}
	if id != "" {
		r.url = id
	}

	// index subschemas ($id/$anchor/...) so fragments can be resolved later
	if err := r.fillSubschemas(c, r); err != nil {
		return nil, err
	}

	return r, nil
}
|
||||
|
||||
// compileURL compiles the schema at the given absolute url (which may
// carry a fragment). stack/ptr carry the reference chain used for
// infinite-loop detection and error locations.
func (c *Compiler) compileURL(url string, stack []schemaRef, ptr string) (*Schema, error) {
	// if url points to a draft, return Draft.meta (the pre-compiled meta-schema)
	if d := findDraft(url); d != nil && d.meta != nil {
		return d.meta, nil
	}

	// split into base document and fragment, then compile the fragment
	// within that document's resource.
	b, f := split(url)
	r, err := c.findResource(b)
	if err != nil {
		return nil, err
	}
	return c.compileRef(r, stack, ptr, r, f)
}
|
||||
|
||||
// compileRef resolves ref (relative to res within root resource r) and
// returns its compiled schema. refPtr is the relative json-pointer of the
// referencing keyword, used in loop detection and error messages.
func (c *Compiler) compileRef(r *resource, stack []schemaRef, refPtr string, res *resource, ref string) (*Schema, error) {
	// resolve ref against the base URL in effect at res's location
	base := r.baseURL(res.floc)
	ref, err := resolveURL(base, ref)
	if err != nil {
		return nil, err
	}

	u, f := split(ref)
	sr := r.findResource(u)
	if sr == nil {
		// external resource: not embedded in r, compile via its own URL
		return c.compileURL(ref, stack, refPtr)
	}

	// ensure root resource is always compiled first.
	// this is required to get schema.meta from root resource
	if r.schema == nil {
		r.schema = newSchema(r.url, r.floc, r.draft, r.doc)
		if _, err := c.compile(r, nil, schemaRef{"#", r.schema, false}, r); err != nil {
			return nil, err
		}
	}

	// resolve the fragment (json-pointer or anchor) within the sub-resource
	sr, err = r.resolveFragment(c, sr, f)
	if err != nil {
		return nil, err
	}
	if sr == nil {
		return nil, fmt.Errorf("jsonschema: %s not found", ref)
	}

	if sr.schema != nil {
		// already compiled (or being compiled): only check for reference cycles
		if err := checkLoop(stack, schemaRef{refPtr, sr.schema, false}); err != nil {
			return nil, err
		}
		return sr.schema, nil
	}

	// allocate the schema before compiling so self-references find it above
	sr.schema = newSchema(r.url, sr.floc, r.draft, sr.doc)
	return c.compile(r, stack, schemaRef{refPtr, sr.schema, false}, sr)
}
|
||||
|
||||
// compileDynamicAnchors compiles every subschema of res that declares a
// "$dynamicAnchor" and records it in res.schema.dynamicAnchors, so that
// "$dynamicRef" resolution can consult them at validation time.
// It is a no-op for drafts before 2020-12, which introduced the keyword.
func (c *Compiler) compileDynamicAnchors(r *resource, res *resource) error {
	if r.draft.version < 2020 {
		return nil
	}

	// res itself may carry a $dynamicAnchor, so include it with its subresources
	rr := r.listResources(res)
	rr = append(rr, res)
	for _, sr := range rr {
		if m, ok := sr.doc.(map[string]interface{}); ok {
			if _, ok := m["$dynamicAnchor"]; ok {
				// refPtr is unused for this internal compilation, hence "IGNORED"
				sch, err := c.compileRef(r, nil, "IGNORED", r, sr.floc)
				if err != nil {
					return err
				}
				res.schema.dynamicAnchors = append(res.schema.dynamicAnchors, sch)
			}
		}
	}
	return nil
}
|
||||
|
||||
func (c *Compiler) compile(r *resource, stack []schemaRef, sref schemaRef, res *resource) (*Schema, error) {
|
||||
if err := c.compileDynamicAnchors(r, res); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch v := res.doc.(type) {
|
||||
case bool:
|
||||
res.schema.Always = &v
|
||||
return res.schema, nil
|
||||
default:
|
||||
return res.schema, c.compileMap(r, stack, sref, res)
|
||||
}
|
||||
}
|
||||
|
||||
// compileMap compiles a schema whose document is a JSON object, filling
// res.schema keyword by keyword. Keywords are gated on the resource's
// draft version and, for draft2019-09+, on the vocabularies enabled by
// the meta-schema. The statement order matters: $schema/$ref/$vocabulary
// handling must precede keyword loading.
func (c *Compiler) compileMap(r *resource, stack []schemaRef, sref schemaRef, res *resource) error {
	m := res.doc.(map[string]interface{})

	if err := checkLoop(stack, sref); err != nil {
		return err
	}
	stack = append(stack, sref)

	var s = res.schema
	var err error

	// --- core keywords: $schema, $ref, $vocabulary, $recursiveRef, $dynamicRef ---

	if r == res { // root schema
		if sch, ok := m["$schema"]; ok {
			sch := sch.(string)
			if d := findDraft(sch); d != nil {
				s.meta = d.meta
			} else {
				// custom meta-schema: compile it so vocab checks below can use it
				if s.meta, err = c.compileRef(r, stack, "$schema", res, sch); err != nil {
					return err
				}
			}
		}
	}

	if ref, ok := m["$ref"]; ok {
		s.Ref, err = c.compileRef(r, stack, "$ref", res, ref.(string))
		if err != nil {
			return err
		}
		if r.draft.version < 2019 {
			// All other properties in a "$ref" object MUST be ignored
			return nil
		}
	}

	if r.draft.version >= 2019 {
		if r == res { // root schema
			if vocab, ok := m["$vocabulary"]; ok {
				for url, reqd := range vocab.(map[string]interface{}) {
					// optional (false) vocabularies may be skipped silently
					if reqd, ok := reqd.(bool); ok && !reqd {
						continue
					}
					if !r.draft.isVocab(url) {
						return fmt.Errorf("jsonschema: unsupported vocab %q in %s", url, res)
					}
					s.vocab = append(s.vocab, url)
				}
			} else {
				s.vocab = r.draft.defaultVocab
			}
		}

		if ref, ok := m["$recursiveRef"]; ok {
			s.RecursiveRef, err = c.compileRef(r, stack, "$recursiveRef", res, ref.(string))
			if err != nil {
				return err
			}
		}
	}
	if r.draft.version >= 2020 {
		if dref, ok := m["$dynamicRef"]; ok {
			s.DynamicRef, err = c.compileRef(r, stack, "$dynamicRef", res, dref.(string))
			if err != nil {
				return err
			}
			if dref, ok := dref.(string); ok {
				_, frag := split(dref)
				if frag != "#" && !strings.HasPrefix(frag, "#/") {
					// frag is anchor (not a json-pointer); remember it for
					// dynamic-scope resolution at validation time
					s.dynamicRefAnchor = frag[1:]
				}
			}
		}
	}

	// loadInt returns the named keyword as int, or -1 when absent.
	loadInt := func(pname string) int {
		if num, ok := m[pname]; ok {
			i, _ := num.(json.Number).Float64()
			return int(i)
		}
		return -1
	}

	// loadRat returns the named numeric keyword as an exact rational,
	// or nil when absent.
	loadRat := func(pname string) *big.Rat {
		if num, ok := m[pname]; ok {
			r, _ := new(big.Rat).SetString(string(num.(json.Number)))
			return r
		}
		return nil
	}

	// --- "validation" vocabulary keywords ---
	if r.draft.version < 2019 || r.schema.meta.hasVocab("validation") {
		if t, ok := m["type"]; ok {
			switch t := t.(type) {
			case string:
				s.Types = []string{t}
			case []interface{}:
				s.Types = toStrings(t)
			}
		}

		if e, ok := m["enum"]; ok {
			s.Enum = e.([]interface{})
			allPrimitives := true
			for _, item := range s.Enum {
				switch jsonType(item) {
				case "object", "array":
					allPrimitives = false
					break // NOTE(review): breaks the switch only, not the loop — harmless, just no early exit
				}
			}
			// precompute a friendly error message when the enum values are
			// printable primitives; otherwise fall back to a generic one
			s.enumError = "enum failed"
			if allPrimitives {
				if len(s.Enum) == 1 {
					s.enumError = fmt.Sprintf("value must be %#v", s.Enum[0])
				} else {
					strEnum := make([]string, len(s.Enum))
					for i, item := range s.Enum {
						strEnum[i] = fmt.Sprintf("%#v", item)
					}
					s.enumError = fmt.Sprintf("value must be one of %s", strings.Join(strEnum, ", "))
				}
			}
		}

		// exclusiveMinimum/Maximum are booleans modifying minimum/maximum in
		// draft4, and standalone numbers in later drafts; handle both forms.
		s.Minimum = loadRat("minimum")
		if exclusive, ok := m["exclusiveMinimum"]; ok {
			if exclusive, ok := exclusive.(bool); ok {
				if exclusive {
					s.Minimum, s.ExclusiveMinimum = nil, s.Minimum
				}
			} else {
				s.ExclusiveMinimum = loadRat("exclusiveMinimum")
			}
		}

		s.Maximum = loadRat("maximum")
		if exclusive, ok := m["exclusiveMaximum"]; ok {
			if exclusive, ok := exclusive.(bool); ok {
				if exclusive {
					s.Maximum, s.ExclusiveMaximum = nil, s.Maximum
				}
			} else {
				s.ExclusiveMaximum = loadRat("exclusiveMaximum")
			}
		}

		s.MultipleOf = loadRat("multipleOf")

		s.MinProperties, s.MaxProperties = loadInt("minProperties"), loadInt("maxProperties")

		if req, ok := m["required"]; ok {
			s.Required = toStrings(req.([]interface{}))
		}

		s.MinItems, s.MaxItems = loadInt("minItems"), loadInt("maxItems")

		if unique, ok := m["uniqueItems"]; ok {
			s.UniqueItems = unique.(bool)
		}

		s.MinLength, s.MaxLength = loadInt("minLength"), loadInt("maxLength")

		if pattern, ok := m["pattern"]; ok {
			s.Pattern = regexp.MustCompile(pattern.(string))
		}

		if r.draft.version >= 2019 {
			s.MinContains, s.MaxContains = loadInt("minContains"), loadInt("maxContains")
			if s.MinContains == -1 {
				// spec default: minContains is 1 when absent
				s.MinContains = 1
			}

			if deps, ok := m["dependentRequired"]; ok {
				deps := deps.(map[string]interface{})
				s.DependentRequired = make(map[string][]string, len(deps))
				for pname, pvalue := range deps {
					s.DependentRequired[pname] = toStrings(pvalue.([]interface{}))
				}
			}
		}
	}

	// compile compiles the subschema at ptr (relative to this schema's location).
	compile := func(stack []schemaRef, ptr string) (*Schema, error) {
		return c.compileRef(r, stack, ptr, res, r.url+res.floc+"/"+ptr)
	}

	// loadSchema compiles keyword pname's value as a single subschema (nil if absent).
	loadSchema := func(pname string, stack []schemaRef) (*Schema, error) {
		if _, ok := m[pname]; ok {
			return compile(stack, escape(pname))
		}
		return nil, nil
	}

	// loadSchemas compiles keyword pname's value as an array of subschemas (nil if absent).
	loadSchemas := func(pname string, stack []schemaRef) ([]*Schema, error) {
		if pvalue, ok := m[pname]; ok {
			pvalue := pvalue.([]interface{})
			schemas := make([]*Schema, len(pvalue))
			for i := range pvalue {
				sch, err := compile(stack, escape(pname)+"/"+strconv.Itoa(i))
				if err != nil {
					return nil, err
				}
				schemas[i] = sch
			}
			return schemas, nil
		}
		return nil, nil
	}

	// --- "applicator" vocabulary keywords ---
	// NOTE: subschemas applied to the SAME instance value (not/allOf/if/...)
	// pass the current stack for loop detection; subschemas applied to
	// child values (properties/items/...) pass nil.
	if r.draft.version < 2019 || r.schema.meta.hasVocab("applicator") {
		if s.Not, err = loadSchema("not", stack); err != nil {
			return err
		}
		if s.AllOf, err = loadSchemas("allOf", stack); err != nil {
			return err
		}
		if s.AnyOf, err = loadSchemas("anyOf", stack); err != nil {
			return err
		}
		if s.OneOf, err = loadSchemas("oneOf", stack); err != nil {
			return err
		}

		if props, ok := m["properties"]; ok {
			props := props.(map[string]interface{})
			s.Properties = make(map[string]*Schema, len(props))
			for pname := range props {
				s.Properties[pname], err = compile(nil, "properties/"+escape(pname))
				if err != nil {
					return err
				}
			}
		}

		if regexProps, ok := m["regexProperties"]; ok {
			s.RegexProperties = regexProps.(bool)
		}

		if patternProps, ok := m["patternProperties"]; ok {
			patternProps := patternProps.(map[string]interface{})
			s.PatternProperties = make(map[*regexp.Regexp]*Schema, len(patternProps))
			for pattern := range patternProps {
				s.PatternProperties[regexp.MustCompile(pattern)], err = compile(nil, "patternProperties/"+escape(pattern))
				if err != nil {
					return err
				}
			}
		}

		if additionalProps, ok := m["additionalProperties"]; ok {
			// bool or subschema; stored as interface{} and distinguished at validation
			switch additionalProps := additionalProps.(type) {
			case bool:
				s.AdditionalProperties = additionalProps
			case map[string]interface{}:
				s.AdditionalProperties, err = compile(nil, "additionalProperties")
				if err != nil {
					return err
				}
			}
		}

		if deps, ok := m["dependencies"]; ok {
			// legacy keyword: value per property is either a required-list or a subschema
			deps := deps.(map[string]interface{})
			s.Dependencies = make(map[string]interface{}, len(deps))
			for pname, pvalue := range deps {
				switch pvalue := pvalue.(type) {
				case []interface{}:
					s.Dependencies[pname] = toStrings(pvalue)
				default:
					s.Dependencies[pname], err = compile(stack, "dependencies/"+escape(pname))
					if err != nil {
						return err
					}
				}
			}
		}

		if r.draft.version >= 6 {
			if s.PropertyNames, err = loadSchema("propertyNames", nil); err != nil {
				return err
			}
			if s.Contains, err = loadSchema("contains", nil); err != nil {
				return err
			}
		}

		if r.draft.version >= 7 {
			// then/else are only meaningful when "if" is present
			if m["if"] != nil {
				if s.If, err = loadSchema("if", stack); err != nil {
					return err
				}
				if s.Then, err = loadSchema("then", stack); err != nil {
					return err
				}
				if s.Else, err = loadSchema("else", stack); err != nil {
					return err
				}
			}
		}
		if r.draft.version >= 2019 {
			if deps, ok := m["dependentSchemas"]; ok {
				deps := deps.(map[string]interface{})
				s.DependentSchemas = make(map[string]*Schema, len(deps))
				for pname := range deps {
					s.DependentSchemas[pname], err = compile(stack, "dependentSchemas/"+escape(pname))
					if err != nil {
						return err
					}
				}
			}
		}

		if r.draft.version >= 2020 {
			// 2020-12: tuple validation moved to prefixItems; items is a single schema
			if s.PrefixItems, err = loadSchemas("prefixItems", nil); err != nil {
				return err
			}
			if s.Items2020, err = loadSchema("items", nil); err != nil {
				return err
			}
		} else {
			// pre-2020: items is either a tuple (array) or a single schema,
			// with additionalItems applying only to the tuple form
			if items, ok := m["items"]; ok {
				switch items.(type) {
				case []interface{}:
					s.Items, err = loadSchemas("items", nil)
					if err != nil {
						return err
					}
					if additionalItems, ok := m["additionalItems"]; ok {
						switch additionalItems := additionalItems.(type) {
						case bool:
							s.AdditionalItems = additionalItems
						case map[string]interface{}:
							s.AdditionalItems, err = compile(nil, "additionalItems")
							if err != nil {
								return err
							}
						}
					}
				default:
					s.Items, err = compile(nil, "items")
					if err != nil {
						return err
					}
				}
			}
		}

	}

	// unevaluatedXXX keywords were in "applicator" vocab in 2019, but moved to new vocab "unevaluated" in 2020
	if (r.draft.version == 2019 && r.schema.meta.hasVocab("applicator")) || (r.draft.version >= 2020 && r.schema.meta.hasVocab("unevaluated")) {
		if s.UnevaluatedProperties, err = loadSchema("unevaluatedProperties", nil); err != nil {
			return err
		}
		if s.UnevaluatedItems, err = loadSchema("unevaluatedItems", nil); err != nil {
			return err
		}
		if r.draft.version >= 2020 {
			// any item in an array that passes validation of the contains schema is considered "evaluated"
			s.ContainsEval = true
		}
	}

	// --- format ---
	if format, ok := m["format"]; ok {
		s.Format = format.(string)
		// format is only an assertion before 2019-09, or when explicitly enabled
		if r.draft.version < 2019 || c.AssertFormat || r.schema.meta.hasVocab("format-assertion") {
			// compiler-registered formats take precedence over package defaults
			if format, ok := c.Formats[s.Format]; ok {
				s.format = format
			} else {
				s.format, _ = Formats[s.Format]
			}
		}
	}

	// --- annotations ---
	if c.ExtractAnnotations {
		if title, ok := m["title"]; ok {
			s.Title = title.(string)
		}
		if description, ok := m["description"]; ok {
			s.Description = description.(string)
		}
		s.Default = m["default"]
	}

	if r.draft.version >= 6 {
		// const is stored as a single-element Enum-like slice
		if c, ok := m["const"]; ok {
			s.Constant = []interface{}{c}
		}
	}

	// --- content keywords (draft-7+) ---
	if r.draft.version >= 7 {
		if encoding, ok := m["contentEncoding"]; ok {
			s.ContentEncoding = encoding.(string)
			if decoder, ok := c.Decoders[s.ContentEncoding]; ok {
				s.decoder = decoder
			} else {
				s.decoder, _ = Decoders[s.ContentEncoding]
			}
		}
		if mediaType, ok := m["contentMediaType"]; ok {
			s.ContentMediaType = mediaType.(string)
			if mediaType, ok := c.MediaTypes[s.ContentMediaType]; ok {
				s.mediaType = mediaType
			} else {
				s.mediaType, _ = MediaTypes[s.ContentMediaType]
			}
			if s.ContentSchema, err = loadSchema("contentSchema", stack); err != nil {
				return err
			}
		}
		if c.ExtractAnnotations {
			if comment, ok := m["$comment"]; ok {
				s.Comment = comment.(string)
			}
			if readOnly, ok := m["readOnly"]; ok {
				s.ReadOnly = readOnly.(bool)
			}
			if writeOnly, ok := m["writeOnly"]; ok {
				s.WriteOnly = writeOnly.(bool)
			}
			if examples, ok := m["examples"]; ok {
				s.Examples = examples.([]interface{})
			}
		}
	}

	if r.draft.version >= 2019 {
		// 2019-09+: content keywords are annotations only, unless AssertContent is set
		if !c.AssertContent {
			s.decoder = nil
			s.mediaType = nil
			s.ContentSchema = nil
		}
		if c.ExtractAnnotations {
			if deprecated, ok := m["deprecated"]; ok {
				s.Deprecated = deprecated.(bool)
			}
		}
	}

	// --- user-registered extensions ---
	for name, ext := range c.extensions {
		es, err := ext.compiler.Compile(CompilerContext{c, r, stack, res}, m)
		if err != nil {
			return err
		}
		if es != nil {
			if s.Extensions == nil {
				s.Extensions = make(map[string]ExtSchema)
			}
			s.Extensions[name] = es
		}
	}

	return nil
}
|
||||
|
||||
// validateSchema validates the raw schema document v (located at vloc)
// against the resource's draft meta-schema and against every registered
// extension's meta-schema. A nil meta-schema is skipped.
func (c *Compiler) validateSchema(r *resource, v interface{}, vloc string) error {
	validate := func(meta *Schema) error {
		if meta == nil {
			return nil
		}
		return meta.validateValue(v, vloc)
	}

	if err := validate(r.draft.meta); err != nil {
		return err
	}
	for _, ext := range c.extensions {
		if err := validate(ext.meta); err != nil {
			return err
		}
	}
	return nil
}
|
||||
|
||||
// toStrings converts a []interface{} whose elements are all strings into
// a []string. Panics (via type assertion) if any element is not a string.
func toStrings(arr []interface{}) []string {
	out := make([]string, 0, len(arr))
	for _, v := range arr {
		out = append(out, v.(string))
	}
	return out
}
|
||||
|
||||
// schemaRef captures schema and the path referring to it.
// A slice of schemaRef forms the reference stack used for infinite-loop
// detection and for building keyword locations in errors.
type schemaRef struct {
	path    string  // relative-json-pointer to schema
	schema  *Schema // target schema
	discard bool    // true when scope left
}
|
||||
|
||||
// String returns "(path)schema" — a debug representation of the reference.
func (sr schemaRef) String() string {
	return fmt.Sprintf("(%s)%v", sr.path, sr.schema)
}
|
||||
|
||||
func checkLoop(stack []schemaRef, sref schemaRef) error {
|
||||
for _, ref := range stack {
|
||||
if ref.schema == sref.schema {
|
||||
return infiniteLoopError(stack, sref)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func keywordLocation(stack []schemaRef, path string) string {
|
||||
var loc string
|
||||
for _, ref := range stack[1:] {
|
||||
loc += "/" + ref.path
|
||||
}
|
||||
if path != "" {
|
||||
loc = loc + "/" + path
|
||||
}
|
||||
return loc
|
||||
}
|
||||
29
vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
generated
vendored
29
vendor/github.com/santhosh-tekuri/jsonschema/v5/content.go
generated
vendored
|
|
@ -1,29 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// Decoders is a registry of functions, which know how to decode
// string encoded in specific format.
//
// New Decoders can be registered by adding to this map. Key is encoding name,
// value is function that knows how to decode string in that format.
var Decoders = map[string]func(string) ([]byte, error){
	"base64": base64.StdEncoding.DecodeString,
}

// MediaTypes is a registry of functions, which know how to validate
// whether the bytes represent data of that mediaType.
//
// New mediaTypes can be registered by adding to this map. Key is mediaType name,
// value is function that knows how to validate that mediaType.
var MediaTypes = map[string]func([]byte) error{
	"application/json": validateJSON,
}
|
||||
|
||||
// validateJSON returns nil when b is syntactically valid JSON,
// otherwise the json.Unmarshal error.
func validateJSON(b []byte) error {
	var doc interface{}
	if err := json.Unmarshal(b, &doc); err != nil {
		return err
	}
	return nil
}
|
||||
49
vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
generated
vendored
49
vendor/github.com/santhosh-tekuri/jsonschema/v5/doc.go
generated
vendored
|
|
@ -1,49 +0,0 @@
|
|||
/*
|
||||
Package jsonschema provides json-schema compilation and validation.
|
||||
|
||||
Features:
|
||||
- implements draft 2020-12, 2019-09, draft-7, draft-6, draft-4
|
||||
- fully compliant with JSON-Schema-Test-Suite, (excluding some optional)
|
||||
- list of optional tests that are excluded can be found in schema_test.go(variable skipTests)
|
||||
- validates schemas against meta-schema
|
||||
- full support of remote references
|
||||
- support of recursive references between schemas
|
||||
- detects infinite loop in schemas
|
||||
- thread safe validation
|
||||
  - rich, intuitive hierarchical error messages with json-pointers to exact location
|
||||
- supports output formats flag, basic and detailed
|
||||
- supports enabling format and content Assertions in draft2019-09 or above
|
||||
- change Compiler.AssertFormat, Compiler.AssertContent to true
|
||||
- compiled schema can be introspected. easier to develop tools like generating go structs given schema
|
||||
- supports user-defined keywords via extensions
|
||||
- implements following formats (supports user-defined)
|
||||
- date-time, date, time, duration (supports leap-second)
|
||||
- uuid, hostname, email
|
||||
- ip-address, ipv4, ipv6
|
||||
- uri, uriref, uri-template(limited validation)
|
||||
- json-pointer, relative-json-pointer
|
||||
- regex, format
|
||||
- implements following contentEncoding (supports user-defined)
|
||||
- base64
|
||||
- implements following contentMediaType (supports user-defined)
|
||||
- application/json
|
||||
- can load from files/http/https/string/[]byte/io.Reader (supports user-defined)
|
||||
|
||||
The schema is compiled against the version specified in "$schema" property.
|
||||
If "$schema" property is missing, it uses latest draft which currently implemented
|
||||
by this library.
|
||||
|
||||
You can force to use specific draft, when "$schema" is missing, as follows:
|
||||
|
||||
compiler := jsonschema.NewCompiler()
|
||||
compiler.Draft = jsonschema.Draft4
|
||||
|
||||
This package supports loading json-schema from filePath and fileURL.
|
||||
|
||||
To load json-schema from HTTPURL, add following import:
|
||||
|
||||
import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
|
||||
you can validate yaml documents. see https://play.golang.org/p/sJy1qY7dXgA
|
||||
*/
|
||||
package jsonschema
|
||||
1454
vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
generated
vendored
1454
vendor/github.com/santhosh-tekuri/jsonschema/v5/draft.go
generated
vendored
File diff suppressed because it is too large
Load diff
129
vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
generated
vendored
129
vendor/github.com/santhosh-tekuri/jsonschema/v5/errors.go
generated
vendored
|
|
@ -1,129 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// InvalidJSONTypeError is the error type returned by ValidateInterface.
// this tells that specified go object is not valid jsonType.
// The string value is the offending Go type's name.
type InvalidJSONTypeError string

// Error implements the error interface.
func (e InvalidJSONTypeError) Error() string {
	return fmt.Sprintf("jsonschema: invalid jsonType: %s", string(e))
}
|
||||
|
||||
// InfiniteLoopError is returned by Compile/Validate.
// this gives url#keywordLocation that lead to infinity loop.
type InfiniteLoopError string

// Error implements the error interface.
func (e InfiniteLoopError) Error() string {
	return "jsonschema: infinite loop " + string(e)
}
|
||||
|
||||
// infiniteLoopError builds an InfiniteLoopError from the reference stack:
// the first entry contributes its schema location, every subsequent entry
// (and finally sref) contributes its relative path.
func infiniteLoopError(stack []schemaRef, sref schemaRef) InfiniteLoopError {
	var path string
	for _, ref := range stack {
		if path == "" {
			path += ref.schema.Location
		} else {
			path += "/" + ref.path
		}
	}
	return InfiniteLoopError(path + "/" + sref.path)
}
|
||||
|
||||
// SchemaError is the error type returned by Compile.
type SchemaError struct {
	// SchemaURL is the url to json-schema that failed to compile.
	// This is helpful, if your schema refers to external schemas
	SchemaURL string

	// Err is the error that occurred during compilation.
	// It could be ValidationError, because compilation validates
	// given schema against the json meta-schema
	Err error
}
|
||||
|
||||
// Unwrap returns the underlying compilation error, enabling errors.Is/As.
func (se *SchemaError) Unwrap() error {
	return se.Err
}
|
||||
|
||||
// Error implements the error interface. The wrapped error's message is
// appended with its "jsonschema: " prefix stripped to avoid repetition.
func (se *SchemaError) Error() string {
	s := fmt.Sprintf("jsonschema %s compilation failed", se.SchemaURL)
	if se.Err != nil {
		return fmt.Sprintf("%s: %v", s, strings.TrimPrefix(se.Err.Error(), "jsonschema: "))
	}
	return s
}
|
||||
|
||||
// GoString returns a detailed representation: for a wrapped
// *ValidationError it includes the full hierarchical cause tree,
// otherwise it falls back to Error().
func (se *SchemaError) GoString() string {
	if _, ok := se.Err.(*ValidationError); ok {
		return fmt.Sprintf("jsonschema %s compilation failed\n%#v", se.SchemaURL, se.Err)
	}
	return se.Error()
}
|
||||
|
||||
// ValidationError is the error type returned by Validate.
type ValidationError struct {
	KeywordLocation         string             // validation path of validating keyword or schema
	AbsoluteKeywordLocation string             // absolute location of validating keyword or schema
	InstanceLocation        string             // location of the json value within the instance being validated
	Message                 string             // describes error
	Causes                  []*ValidationError // nested validation errors
}
|
||||
|
||||
// add appends causes to ve.Causes and returns ve.
// Each cause must be a *ValidationError (the type assertion panics otherwise).
func (ve *ValidationError) add(causes ...error) error {
	for _, cause := range causes {
		ve.Causes = append(ve.Causes, cause.(*ValidationError))
	}
	return ve
}
|
||||
|
||||
// causes absorbs err (a *ValidationError) into ve: a message-less error is
// flattened by adopting its Causes directly; otherwise err itself becomes
// a cause. Returns ve.
func (ve *ValidationError) causes(err error) error {
	if err := err.(*ValidationError); err.Message == "" {
		ve.Causes = err.Causes
	} else {
		ve.add(err)
	}
	return ve
}
|
||||
|
||||
// Error implements the error interface. It reports the first leaf cause
// (following Causes[0] to the bottom of the tree), combining the root's
// schema url with the leaf's keyword location and message.
func (ve *ValidationError) Error() string {
	leaf := ve
	for len(leaf.Causes) > 0 {
		leaf = leaf.Causes[0]
	}
	u, _ := split(ve.AbsoluteKeywordLocation)
	return fmt.Sprintf("jsonschema: %s does not validate with %s: %s", quote(leaf.InstanceLocation), u+"#"+leaf.KeywordLocation, leaf.Message)
}
|
||||
|
||||
// GoString renders the full error tree, one "[I#instance] [S#schema] msg"
// line per error, with causes indented two spaces per nesting level.
func (ve *ValidationError) GoString() string {
	sloc := ve.AbsoluteKeywordLocation
	sloc = sloc[strings.IndexByte(sloc, '#')+1:] // keep only the fragment part
	msg := fmt.Sprintf("[I#%s] [S#%s] %s", ve.InstanceLocation, sloc, ve.Message)
	for _, c := range ve.Causes {
		for _, line := range strings.Split(c.GoString(), "\n") {
			msg += "\n  " + line
		}
	}
	return msg
}
|
||||
|
||||
// joinPtr joins two json-pointer segments with "/", treating an empty
// segment on either side as absent.
func joinPtr(ptr1, ptr2 string) string {
	switch {
	case ptr1 == "":
		return ptr2
	case ptr2 == "":
		return ptr1
	default:
		return ptr1 + "/" + ptr2
	}
}
|
||||
|
||||
// quote returns single-quoted string: s is Go-quoted (%q), the surrounding
// double quotes are swapped for single quotes, embedded double quotes are
// un-escaped and embedded single quotes escaped.
func quote(s string) string {
	q := fmt.Sprintf("%q", s)
	q = q[1 : len(q)-1] // drop the surrounding double quotes added by %q
	q = strings.ReplaceAll(q, `\"`, `"`)
	q = strings.ReplaceAll(q, `'`, `\'`)
	return "'" + q + "'"
}
|
||||
116
vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
generated
vendored
116
vendor/github.com/santhosh-tekuri/jsonschema/v5/extension.go
generated
vendored
|
|
@ -1,116 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
// ExtCompiler compiles custom keyword(s) into ExtSchema.
type ExtCompiler interface {
	// Compile compiles the custom keywords in schema m and returns its compiled representation.
	// if the schema m does not contain the keywords defined by this extension,
	// compiled representation nil should be returned.
	Compile(ctx CompilerContext, m map[string]interface{}) (ExtSchema, error)
}
|
||||
|
||||
// ExtSchema is schema representation of custom keyword(s).
type ExtSchema interface {
	// Validate validates the json value v with this ExtSchema.
	// Returned error must be *ValidationError.
	Validate(ctx ValidationContext, v interface{}) error
}
|
||||
|
||||
// extension pairs a registered extension's meta-schema (used to validate
// schemas before compilation) with its keyword compiler.
type extension struct {
	meta     *Schema
	compiler ExtCompiler
}
|
||||
|
||||
// RegisterExtension registers custom keyword(s) into this compiler.
//
// name is extension name, used only to avoid name collisions.
// meta captures the metaschema for the new keywords.
// This is used to validate the schema before calling ext.Compile.
func (c *Compiler) RegisterExtension(name string, meta *Schema, ext ExtCompiler) {
	c.extensions[name] = extension{meta, ext}
}
|
||||
|
||||
// CompilerContext ---
|
||||
|
||||
// CompilerContext provides additional context required in compiling for extension.
|
||||
type CompilerContext struct {
|
||||
c *Compiler
|
||||
r *resource
|
||||
stack []schemaRef
|
||||
res *resource
|
||||
}
|
||||
|
||||
// Compile compiles given value at ptr into *Schema. This is useful in implementing
|
||||
// keyword like allOf/not/patternProperties.
|
||||
//
|
||||
// schPath is the relative-json-pointer to the schema to be compiled from parent schema.
|
||||
//
|
||||
// applicableOnSameInstance tells whether current schema and the given schema
|
||||
// are applied on same instance value. this is used to detect infinite loop in schema.
|
||||
func (ctx CompilerContext) Compile(schPath string, applicableOnSameInstance bool) (*Schema, error) {
|
||||
var stack []schemaRef
|
||||
if applicableOnSameInstance {
|
||||
stack = ctx.stack
|
||||
}
|
||||
return ctx.c.compileRef(ctx.r, stack, schPath, ctx.res, ctx.r.url+ctx.res.floc+"/"+schPath)
|
||||
}
|
||||
|
||||
// CompileRef compiles the schema referenced by ref uri
|
||||
//
|
||||
// refPath is the relative-json-pointer to ref.
|
||||
//
|
||||
// applicableOnSameInstance tells whether current schema and the given schema
|
||||
// are applied on same instance value. this is used to detect infinite loop in schema.
|
||||
func (ctx CompilerContext) CompileRef(ref string, refPath string, applicableOnSameInstance bool) (*Schema, error) {
|
||||
var stack []schemaRef
|
||||
if applicableOnSameInstance {
|
||||
stack = ctx.stack
|
||||
}
|
||||
return ctx.c.compileRef(ctx.r, stack, refPath, ctx.res, ref)
|
||||
}
|
||||
|
||||
// ValidationContext ---
|
||||
|
||||
// ValidationContext provides additional context required in validating for extension.
|
||||
type ValidationContext struct {
|
||||
result validationResult
|
||||
validate func(sch *Schema, schPath string, v interface{}, vpath string) error
|
||||
validateInplace func(sch *Schema, schPath string) error
|
||||
validationError func(keywordPath string, format string, a ...interface{}) *ValidationError
|
||||
}
|
||||
|
||||
// EvaluatedProp marks given property of object as evaluated.
|
||||
func (ctx ValidationContext) EvaluatedProp(prop string) {
|
||||
delete(ctx.result.unevalProps, prop)
|
||||
}
|
||||
|
||||
// EvaluatedItem marks given index of array as evaluated.
|
||||
func (ctx ValidationContext) EvaluatedItem(index int) {
|
||||
delete(ctx.result.unevalItems, index)
|
||||
}
|
||||
|
||||
// Validate validates schema s with value v. Extension must use this method instead of
|
||||
// *Schema.ValidateInterface method. This will be useful in implementing keywords like
|
||||
// allOf/oneOf
|
||||
//
|
||||
// spath is relative-json-pointer to s
|
||||
// vpath is relative-json-pointer to v.
|
||||
func (ctx ValidationContext) Validate(s *Schema, spath string, v interface{}, vpath string) error {
|
||||
if vpath == "" {
|
||||
return ctx.validateInplace(s, spath)
|
||||
}
|
||||
return ctx.validate(s, spath, v, vpath)
|
||||
}
|
||||
|
||||
// Error used to construct validation error by extensions.
|
||||
//
|
||||
// keywordPath is relative-json-pointer to keyword.
|
||||
func (ctx ValidationContext) Error(keywordPath string, format string, a ...interface{}) *ValidationError {
|
||||
return ctx.validationError(keywordPath, format, a...)
|
||||
}
|
||||
|
||||
// Group is used by extensions to group multiple errors as causes to parent error.
|
||||
// This is useful in implementing keywords like allOf where each schema specified
|
||||
// in allOf can result a validationError.
|
||||
func (ValidationError) Group(parent *ValidationError, causes ...error) error {
|
||||
return parent.add(causes...)
|
||||
}
|
||||
567
vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
generated
vendored
567
vendor/github.com/santhosh-tekuri/jsonschema/v5/format.go
generated
vendored
|
|
@ -1,567 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net"
|
||||
"net/mail"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Formats is a registry of functions, which know how to validate
|
||||
// a specific format.
|
||||
//
|
||||
// New Formats can be registered by adding to this map. Key is format name,
|
||||
// value is function that knows how to validate that format.
|
||||
var Formats = map[string]func(interface{}) bool{
|
||||
"date-time": isDateTime,
|
||||
"date": isDate,
|
||||
"time": isTime,
|
||||
"duration": isDuration,
|
||||
"period": isPeriod,
|
||||
"hostname": isHostname,
|
||||
"email": isEmail,
|
||||
"ip-address": isIPV4,
|
||||
"ipv4": isIPV4,
|
||||
"ipv6": isIPV6,
|
||||
"uri": isURI,
|
||||
"iri": isURI,
|
||||
"uri-reference": isURIReference,
|
||||
"uriref": isURIReference,
|
||||
"iri-reference": isURIReference,
|
||||
"uri-template": isURITemplate,
|
||||
"regex": isRegex,
|
||||
"json-pointer": isJSONPointer,
|
||||
"relative-json-pointer": isRelativeJSONPointer,
|
||||
"uuid": isUUID,
|
||||
}
|
||||
|
||||
// isDateTime tells whether given string is a valid date representation
|
||||
// as defined by RFC 3339, section 5.6.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
|
||||
func isDateTime(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
if len(s) < 20 { // yyyy-mm-ddThh:mm:ssZ
|
||||
return false
|
||||
}
|
||||
if s[10] != 'T' && s[10] != 't' {
|
||||
return false
|
||||
}
|
||||
return isDate(s[:10]) && isTime(s[11:])
|
||||
}
|
||||
|
||||
// isDate tells whether given string is a valid full-date production
|
||||
// as defined by RFC 3339, section 5.6.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
|
||||
func isDate(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
_, err := time.Parse("2006-01-02", s)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// isTime tells whether given string is a valid full-time production
|
||||
// as defined by RFC 3339, section 5.6.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6, for details
|
||||
func isTime(v interface{}) bool {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
|
||||
// golang time package does not support leap seconds.
|
||||
// so we are parsing it manually here.
|
||||
|
||||
// hh:mm:ss
|
||||
// 01234567
|
||||
if len(str) < 9 || str[2] != ':' || str[5] != ':' {
|
||||
return false
|
||||
}
|
||||
isInRange := func(str string, min, max int) (int, bool) {
|
||||
n, err := strconv.Atoi(str)
|
||||
if err != nil {
|
||||
return 0, false
|
||||
}
|
||||
if n < min || n > max {
|
||||
return 0, false
|
||||
}
|
||||
return n, true
|
||||
}
|
||||
var h, m, s int
|
||||
if h, ok = isInRange(str[0:2], 0, 23); !ok {
|
||||
return false
|
||||
}
|
||||
if m, ok = isInRange(str[3:5], 0, 59); !ok {
|
||||
return false
|
||||
}
|
||||
if s, ok = isInRange(str[6:8], 0, 60); !ok {
|
||||
return false
|
||||
}
|
||||
str = str[8:]
|
||||
|
||||
// parse secfrac if present
|
||||
if str[0] == '.' {
|
||||
// dot following more than one digit
|
||||
str = str[1:]
|
||||
var numDigits int
|
||||
for str != "" {
|
||||
if str[0] < '0' || str[0] > '9' {
|
||||
break
|
||||
}
|
||||
numDigits++
|
||||
str = str[1:]
|
||||
}
|
||||
if numDigits == 0 {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if len(str) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
if str[0] == 'z' || str[0] == 'Z' {
|
||||
if len(str) != 1 {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
// time-numoffset
|
||||
// +hh:mm
|
||||
// 012345
|
||||
if len(str) != 6 || str[3] != ':' {
|
||||
return false
|
||||
}
|
||||
|
||||
var sign int
|
||||
if str[0] == '+' {
|
||||
sign = -1
|
||||
} else if str[0] == '-' {
|
||||
sign = +1
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
|
||||
var zh, zm int
|
||||
if zh, ok = isInRange(str[1:3], 0, 23); !ok {
|
||||
return false
|
||||
}
|
||||
if zm, ok = isInRange(str[4:6], 0, 59); !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
// apply timezone offset
|
||||
hm := (h*60 + m) + sign*(zh*60+zm)
|
||||
if hm < 0 {
|
||||
hm += 24 * 60
|
||||
}
|
||||
h, m = hm/60, hm%60
|
||||
}
|
||||
|
||||
// check leapsecond
|
||||
if s == 60 { // leap second
|
||||
if h != 23 || m != 59 {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// isDuration tells whether given string is a valid duration format
|
||||
// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
|
||||
func isDuration(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
if len(s) == 0 || s[0] != 'P' {
|
||||
return false
|
||||
}
|
||||
s = s[1:]
|
||||
parseUnits := func() (units string, ok bool) {
|
||||
for len(s) > 0 && s[0] != 'T' {
|
||||
digits := false
|
||||
for {
|
||||
if len(s) == 0 {
|
||||
break
|
||||
}
|
||||
if s[0] < '0' || s[0] > '9' {
|
||||
break
|
||||
}
|
||||
digits = true
|
||||
s = s[1:]
|
||||
}
|
||||
if !digits || len(s) == 0 {
|
||||
return units, false
|
||||
}
|
||||
units += s[:1]
|
||||
s = s[1:]
|
||||
}
|
||||
return units, true
|
||||
}
|
||||
units, ok := parseUnits()
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
if units == "W" {
|
||||
return len(s) == 0 // P_W
|
||||
}
|
||||
if len(units) > 0 {
|
||||
if strings.Index("YMD", units) == -1 {
|
||||
return false
|
||||
}
|
||||
if len(s) == 0 {
|
||||
return true // "P" dur-date
|
||||
}
|
||||
}
|
||||
if len(s) == 0 || s[0] != 'T' {
|
||||
return false
|
||||
}
|
||||
s = s[1:]
|
||||
units, ok = parseUnits()
|
||||
return ok && len(s) == 0 && len(units) > 0 && strings.Index("HMS", units) != -1
|
||||
}
|
||||
|
||||
// isPeriod tells whether given string is a valid period format
|
||||
// from the ISO 8601 ABNF as given in Appendix A of RFC 3339.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A, for details
|
||||
func isPeriod(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
slash := strings.IndexByte(s, '/')
|
||||
if slash == -1 {
|
||||
return false
|
||||
}
|
||||
start, end := s[:slash], s[slash+1:]
|
||||
if isDateTime(start) {
|
||||
return isDateTime(end) || isDuration(end)
|
||||
}
|
||||
return isDuration(start) && isDateTime(end)
|
||||
}
|
||||
|
||||
// isHostname tells whether given string is a valid representation
|
||||
// for an Internet host name, as defined by RFC 1034 section 3.1 and
|
||||
// RFC 1123 section 2.1.
|
||||
//
|
||||
// See https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names, for details.
|
||||
func isHostname(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
// entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
|
||||
s = strings.TrimSuffix(s, ".")
|
||||
if len(s) > 253 {
|
||||
return false
|
||||
}
|
||||
|
||||
// Hostnames are composed of series of labels concatenated with dots, as are all domain names
|
||||
for _, label := range strings.Split(s, ".") {
|
||||
// Each label must be from 1 to 63 characters long
|
||||
if labelLen := len(label); labelLen < 1 || labelLen > 63 {
|
||||
return false
|
||||
}
|
||||
|
||||
// labels must not start with a hyphen
|
||||
// RFC 1123 section 2.1: restriction on the first character
|
||||
// is relaxed to allow either a letter or a digit
|
||||
if first := s[0]; first == '-' {
|
||||
return false
|
||||
}
|
||||
|
||||
// must not end with a hyphen
|
||||
if label[len(label)-1] == '-' {
|
||||
return false
|
||||
}
|
||||
|
||||
// labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
|
||||
// the digits '0' through '9', and the hyphen ('-')
|
||||
for _, c := range label {
|
||||
if valid := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-'); !valid {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// isEmail tells whether given string is a valid Internet email address
|
||||
// as defined by RFC 5322, section 3.4.1.
|
||||
//
|
||||
// See https://en.wikipedia.org/wiki/Email_address, for details.
|
||||
func isEmail(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
// entire email address to be no more than 254 characters long
|
||||
if len(s) > 254 {
|
||||
return false
|
||||
}
|
||||
|
||||
// email address is generally recognized as having two parts joined with an at-sign
|
||||
at := strings.LastIndexByte(s, '@')
|
||||
if at == -1 {
|
||||
return false
|
||||
}
|
||||
local := s[0:at]
|
||||
domain := s[at+1:]
|
||||
|
||||
// local part may be up to 64 characters long
|
||||
if len(local) > 64 {
|
||||
return false
|
||||
}
|
||||
|
||||
// domain if enclosed in brackets, must match an IP address
|
||||
if len(domain) >= 2 && domain[0] == '[' && domain[len(domain)-1] == ']' {
|
||||
ip := domain[1 : len(domain)-1]
|
||||
if strings.HasPrefix(ip, "IPv6:") {
|
||||
return isIPV6(strings.TrimPrefix(ip, "IPv6:"))
|
||||
}
|
||||
return isIPV4(ip)
|
||||
}
|
||||
|
||||
// domain must match the requirements for a hostname
|
||||
if !isHostname(domain) {
|
||||
return false
|
||||
}
|
||||
|
||||
_, err := mail.ParseAddress(s)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// isIPV4 tells whether given string is a valid representation of an IPv4 address
|
||||
// according to the "dotted-quad" ABNF syntax as defined in RFC 2673, section 3.2.
|
||||
func isIPV4(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
groups := strings.Split(s, ".")
|
||||
if len(groups) != 4 {
|
||||
return false
|
||||
}
|
||||
for _, group := range groups {
|
||||
n, err := strconv.Atoi(group)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
if n < 0 || n > 255 {
|
||||
return false
|
||||
}
|
||||
if n != 0 && group[0] == '0' {
|
||||
return false // leading zeroes should be rejected, as they are treated as octals
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// isIPV6 tells whether given string is a valid representation of an IPv6 address
|
||||
// as defined in RFC 2373, section 2.2.
|
||||
func isIPV6(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
if !strings.Contains(s, ":") {
|
||||
return false
|
||||
}
|
||||
return net.ParseIP(s) != nil
|
||||
}
|
||||
|
||||
// isURI tells whether given string is valid URI, according to RFC 3986.
|
||||
func isURI(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
u, err := urlParse(s)
|
||||
return err == nil && u.IsAbs()
|
||||
}
|
||||
|
||||
func urlParse(s string) (*url.URL, error) {
|
||||
u, err := url.Parse(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// if hostname is ipv6, validate it
|
||||
hostname := u.Hostname()
|
||||
if strings.IndexByte(hostname, ':') != -1 {
|
||||
if strings.IndexByte(u.Host, '[') == -1 || strings.IndexByte(u.Host, ']') == -1 {
|
||||
return nil, errors.New("ipv6 address is not enclosed in brackets")
|
||||
}
|
||||
if !isIPV6(hostname) {
|
||||
return nil, errors.New("invalid ipv6 address")
|
||||
}
|
||||
}
|
||||
return u, nil
|
||||
}
|
||||
|
||||
// isURIReference tells whether given string is a valid URI Reference
|
||||
// (either a URI or a relative-reference), according to RFC 3986.
|
||||
func isURIReference(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
_, err := urlParse(s)
|
||||
return err == nil && !strings.Contains(s, `\`)
|
||||
}
|
||||
|
||||
// isURITemplate tells whether given string is a valid URI Template
|
||||
// according to RFC6570.
|
||||
//
|
||||
// Current implementation does minimal validation.
|
||||
func isURITemplate(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
u, err := urlParse(s)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
for _, item := range strings.Split(u.RawPath, "/") {
|
||||
depth := 0
|
||||
for _, ch := range item {
|
||||
switch ch {
|
||||
case '{':
|
||||
depth++
|
||||
if depth != 1 {
|
||||
return false
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
if depth != 0 {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
if depth != 0 {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// isRegex tells whether given string is a valid regular expression,
|
||||
// according to the ECMA 262 regular expression dialect.
|
||||
//
|
||||
// The implementation uses go-lang regexp package.
|
||||
func isRegex(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
_, err := regexp.Compile(s)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// isJSONPointer tells whether given string is a valid JSON Pointer.
|
||||
//
|
||||
// Note: It returns false for JSON Pointer URI fragments.
|
||||
func isJSONPointer(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
if s != "" && !strings.HasPrefix(s, "/") {
|
||||
return false
|
||||
}
|
||||
for _, item := range strings.Split(s, "/") {
|
||||
for i := 0; i < len(item); i++ {
|
||||
if item[i] == '~' {
|
||||
if i == len(item)-1 {
|
||||
return false
|
||||
}
|
||||
switch item[i+1] {
|
||||
case '0', '1':
|
||||
// valid
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// isRelativeJSONPointer tells whether given string is a valid Relative JSON Pointer.
|
||||
//
|
||||
// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
|
||||
func isRelativeJSONPointer(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
if s == "" {
|
||||
return false
|
||||
}
|
||||
if s[0] == '0' {
|
||||
s = s[1:]
|
||||
} else if s[0] >= '0' && s[0] <= '9' {
|
||||
for s != "" && s[0] >= '0' && s[0] <= '9' {
|
||||
s = s[1:]
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
return s == "#" || isJSONPointer(s)
|
||||
}
|
||||
|
||||
// isUUID tells whether given string is a valid uuid format
|
||||
// as specified in RFC4122.
|
||||
//
|
||||
// see https://datatracker.ietf.org/doc/html/rfc4122#page-4, for details
|
||||
func isUUID(v interface{}) bool {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
parseHex := func(n int) bool {
|
||||
for n > 0 {
|
||||
if len(s) == 0 {
|
||||
return false
|
||||
}
|
||||
hex := (s[0] >= '0' && s[0] <= '9') || (s[0] >= 'a' && s[0] <= 'f') || (s[0] >= 'A' && s[0] <= 'F')
|
||||
if !hex {
|
||||
return false
|
||||
}
|
||||
s = s[1:]
|
||||
n--
|
||||
}
|
||||
return true
|
||||
}
|
||||
groups := []int{8, 4, 4, 4, 12}
|
||||
for i, numDigits := range groups {
|
||||
if !parseHex(numDigits) {
|
||||
return false
|
||||
}
|
||||
if i == len(groups)-1 {
|
||||
break
|
||||
}
|
||||
if len(s) == 0 || s[0] != '-' {
|
||||
return false
|
||||
}
|
||||
s = s[1:]
|
||||
}
|
||||
return len(s) == 0
|
||||
}
|
||||
38
vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go
generated
vendored
38
vendor/github.com/santhosh-tekuri/jsonschema/v5/httploader/httploader.go
generated
vendored
|
|
@ -1,38 +0,0 @@
|
|||
// Package httploader implements loader.Loader for http/https url.
|
||||
//
|
||||
// The package is typically only imported for the side effect of
|
||||
// registering its Loaders.
|
||||
//
|
||||
// To use httploader, link this package into your program:
|
||||
//
|
||||
// import _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
|
||||
package httploader
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v5"
|
||||
)
|
||||
|
||||
// Client is the default HTTP Client used to Get the resource.
|
||||
var Client = http.DefaultClient
|
||||
|
||||
// Load loads resource from given http(s) url.
|
||||
func Load(url string) (io.ReadCloser, error) {
|
||||
resp, err := Client.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
_ = resp.Body.Close()
|
||||
return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode)
|
||||
}
|
||||
return resp.Body, nil
|
||||
}
|
||||
|
||||
func init() {
|
||||
jsonschema.Loaders["http"] = Load
|
||||
jsonschema.Loaders["https"] = Load
|
||||
}
|
||||
60
vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
generated
vendored
60
vendor/github.com/santhosh-tekuri/jsonschema/v5/loader.go
generated
vendored
|
|
@ -1,60 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func loadFileURL(s string) (io.ReadCloser, error) {
|
||||
u, err := url.Parse(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
f := u.Path
|
||||
if runtime.GOOS == "windows" {
|
||||
f = strings.TrimPrefix(f, "/")
|
||||
f = filepath.FromSlash(f)
|
||||
}
|
||||
return os.Open(f)
|
||||
}
|
||||
|
||||
// Loaders is a registry of functions, which know how to load
|
||||
// absolute url of specific schema.
|
||||
//
|
||||
// New loaders can be registered by adding to this map. Key is schema,
|
||||
// value is function that knows how to load url of that schema
|
||||
var Loaders = map[string]func(url string) (io.ReadCloser, error){
|
||||
"file": loadFileURL,
|
||||
}
|
||||
|
||||
// LoaderNotFoundError is the error type returned by Load function.
|
||||
// It tells that no Loader is registered for that URL Scheme.
|
||||
type LoaderNotFoundError string
|
||||
|
||||
func (e LoaderNotFoundError) Error() string {
|
||||
return fmt.Sprintf("jsonschema: no Loader found for %s", string(e))
|
||||
}
|
||||
|
||||
// LoadURL loads document at given absolute URL. The default implementation
|
||||
// uses Loaders registry to lookup by schema and uses that loader.
|
||||
//
|
||||
// Users can change this variable, if they would like to take complete
|
||||
// responsibility of loading given URL. Used by Compiler if its LoadURL
|
||||
// field is nil.
|
||||
var LoadURL = func(s string) (io.ReadCloser, error) {
|
||||
u, err := url.Parse(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
loader, ok := Loaders[u.Scheme]
|
||||
if !ok {
|
||||
return nil, LoaderNotFoundError(s)
|
||||
|
||||
}
|
||||
return loader(s)
|
||||
}
|
||||
77
vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
generated
vendored
77
vendor/github.com/santhosh-tekuri/jsonschema/v5/output.go
generated
vendored
|
|
@ -1,77 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
// Flag is output format with simple boolean property valid.
|
||||
type Flag struct {
|
||||
Valid bool `json:"valid"`
|
||||
}
|
||||
|
||||
// FlagOutput returns output in flag format
|
||||
func (ve *ValidationError) FlagOutput() Flag {
|
||||
return Flag{}
|
||||
}
|
||||
|
||||
// Basic ---
|
||||
|
||||
// Basic is output format with flat list of output units.
|
||||
type Basic struct {
|
||||
Valid bool `json:"valid"`
|
||||
Errors []BasicError `json:"errors"`
|
||||
}
|
||||
|
||||
// BasicError is output unit in basic format.
|
||||
type BasicError struct {
|
||||
KeywordLocation string `json:"keywordLocation"`
|
||||
AbsoluteKeywordLocation string `json:"absoluteKeywordLocation"`
|
||||
InstanceLocation string `json:"instanceLocation"`
|
||||
Error string `json:"error"`
|
||||
}
|
||||
|
||||
// BasicOutput returns output in basic format
|
||||
func (ve *ValidationError) BasicOutput() Basic {
|
||||
var errors []BasicError
|
||||
var flatten func(*ValidationError)
|
||||
flatten = func(ve *ValidationError) {
|
||||
errors = append(errors, BasicError{
|
||||
KeywordLocation: ve.KeywordLocation,
|
||||
AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
|
||||
InstanceLocation: ve.InstanceLocation,
|
||||
Error: ve.Message,
|
||||
})
|
||||
for _, cause := range ve.Causes {
|
||||
flatten(cause)
|
||||
}
|
||||
}
|
||||
flatten(ve)
|
||||
return Basic{Errors: errors}
|
||||
}
|
||||
|
||||
// Detailed ---
|
||||
|
||||
// Detailed is output format based on structure of schema.
|
||||
type Detailed struct {
|
||||
Valid bool `json:"valid"`
|
||||
KeywordLocation string `json:"keywordLocation"`
|
||||
AbsoluteKeywordLocation string `json:"absoluteKeywordLocation"`
|
||||
InstanceLocation string `json:"instanceLocation"`
|
||||
Error string `json:"error,omitempty"`
|
||||
Errors []Detailed `json:"errors,omitempty"`
|
||||
}
|
||||
|
||||
// DetailedOutput returns output in detailed format
|
||||
func (ve *ValidationError) DetailedOutput() Detailed {
|
||||
var errors []Detailed
|
||||
for _, cause := range ve.Causes {
|
||||
errors = append(errors, cause.DetailedOutput())
|
||||
}
|
||||
var message = ve.Message
|
||||
if len(ve.Causes) > 0 {
|
||||
message = ""
|
||||
}
|
||||
return Detailed{
|
||||
KeywordLocation: ve.KeywordLocation,
|
||||
AbsoluteKeywordLocation: ve.AbsoluteKeywordLocation,
|
||||
InstanceLocation: ve.InstanceLocation,
|
||||
Error: message,
|
||||
Errors: errors,
|
||||
}
|
||||
}
|
||||
280
vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
generated
vendored
280
vendor/github.com/santhosh-tekuri/jsonschema/v5/resource.go
generated
vendored
|
|
@ -1,280 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type resource struct {
|
||||
url string // base url of resource. can be empty
|
||||
floc string // fragment with json-pointer from root resource
|
||||
doc interface{}
|
||||
draft *Draft
|
||||
subresources map[string]*resource // key is floc. only applicable for root resource
|
||||
schema *Schema
|
||||
}
|
||||
|
||||
func (r *resource) String() string {
|
||||
return r.url + r.floc
|
||||
}
|
||||
|
||||
func newResource(url string, r io.Reader) (*resource, error) {
|
||||
if strings.IndexByte(url, '#') != -1 {
|
||||
panic(fmt.Sprintf("BUG: newResource(%q)", url))
|
||||
}
|
||||
doc, err := unmarshal(r)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("jsonschema: invalid json %s: %v", url, err)
|
||||
}
|
||||
url, err = toAbs(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &resource{
|
||||
url: url,
|
||||
floc: "#",
|
||||
doc: doc,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// fillSubschemas fills subschemas in res into r.subresources
|
||||
func (r *resource) fillSubschemas(c *Compiler, res *resource) error {
|
||||
if err := c.validateSchema(r, res.doc, res.floc[1:]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if r.subresources == nil {
|
||||
r.subresources = make(map[string]*resource)
|
||||
}
|
||||
if err := r.draft.listSubschemas(res, r.baseURL(res.floc), r.subresources); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// ensure subresource.url uniqueness
|
||||
url2floc := make(map[string]string)
|
||||
for _, sr := range r.subresources {
|
||||
if sr.url != "" {
|
||||
if floc, ok := url2floc[sr.url]; ok {
|
||||
return fmt.Errorf("jsonschema: %q and %q in %s have same canonical-uri", floc[1:], sr.floc[1:], r.url)
|
||||
}
|
||||
url2floc[sr.url] = sr.floc
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// listResources lists all subresources in res
|
||||
func (r *resource) listResources(res *resource) []*resource {
|
||||
var result []*resource
|
||||
prefix := res.floc + "/"
|
||||
for _, sr := range r.subresources {
|
||||
if strings.HasPrefix(sr.floc, prefix) {
|
||||
result = append(result, sr)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (r *resource) findResource(url string) *resource {
|
||||
if r.url == url {
|
||||
return r
|
||||
}
|
||||
for _, res := range r.subresources {
|
||||
if res.url == url {
|
||||
return res
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// resolve fragment f with sr as base
|
||||
func (r *resource) resolveFragment(c *Compiler, sr *resource, f string) (*resource, error) {
|
||||
if f == "#" || f == "#/" {
|
||||
return sr, nil
|
||||
}
|
||||
|
||||
// resolve by anchor
|
||||
if !strings.HasPrefix(f, "#/") {
|
||||
// check in given resource
|
||||
for _, anchor := range r.draft.anchors(sr.doc) {
|
||||
if anchor == f[1:] {
|
||||
return sr, nil
|
||||
}
|
||||
}
|
||||
|
||||
// check in subresources that has same base url
|
||||
prefix := sr.floc + "/"
|
||||
for _, res := range r.subresources {
|
||||
if strings.HasPrefix(res.floc, prefix) && r.baseURL(res.floc) == sr.url {
|
||||
for _, anchor := range r.draft.anchors(res.doc) {
|
||||
if anchor == f[1:] {
|
||||
return res, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// resolve by ptr
|
||||
floc := sr.floc + f[1:]
|
||||
if res, ok := r.subresources[floc]; ok {
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// non-standrad location
|
||||
doc := r.doc
|
||||
for _, item := range strings.Split(floc[2:], "/") {
|
||||
item = strings.Replace(item, "~1", "/", -1)
|
||||
item = strings.Replace(item, "~0", "~", -1)
|
||||
item, err := url.PathUnescape(item)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch d := doc.(type) {
|
||||
case map[string]interface{}:
|
||||
if _, ok := d[item]; !ok {
|
||||
return nil, nil
|
||||
}
|
||||
doc = d[item]
|
||||
case []interface{}:
|
||||
index, err := strconv.Atoi(item)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if index < 0 || index >= len(d) {
|
||||
return nil, nil
|
||||
}
|
||||
doc = d[index]
|
||||
default:
|
||||
return nil, nil
|
||||
}
|
||||
}
|
||||
|
||||
id, err := r.draft.resolveID(r.baseURL(floc), doc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
res := &resource{url: id, floc: floc, doc: doc}
|
||||
r.subresources[floc] = res
|
||||
if err := r.fillSubschemas(c, res); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (r *resource) baseURL(floc string) string {
|
||||
for {
|
||||
if sr, ok := r.subresources[floc]; ok {
|
||||
if sr.url != "" {
|
||||
return sr.url
|
||||
}
|
||||
}
|
||||
slash := strings.LastIndexByte(floc, '/')
|
||||
if slash == -1 {
|
||||
break
|
||||
}
|
||||
floc = floc[:slash]
|
||||
}
|
||||
return r.url
|
||||
}
|
||||
|
||||
// url helpers ---
|
||||
|
||||
func toAbs(s string) (string, error) {
|
||||
// if windows absolute file path, convert to file url
|
||||
// because: net/url parses driver name as scheme
|
||||
if runtime.GOOS == "windows" && len(s) >= 3 && s[1:3] == `:\` {
|
||||
s = "file:///" + filepath.ToSlash(s)
|
||||
}
|
||||
|
||||
u, err := url.Parse(s)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if u.IsAbs() {
|
||||
return s, nil
|
||||
}
|
||||
|
||||
// s is filepath
|
||||
if s, err = filepath.Abs(s); err != nil {
|
||||
return "", err
|
||||
}
|
||||
if runtime.GOOS == "windows" {
|
||||
s = "file:///" + filepath.ToSlash(s)
|
||||
} else {
|
||||
s = "file://" + s
|
||||
}
|
||||
u, err = url.Parse(s) // to fix spaces in filepath
|
||||
return u.String(), err
|
||||
}
|
||||
|
||||
func resolveURL(base, ref string) (string, error) {
|
||||
if ref == "" {
|
||||
return base, nil
|
||||
}
|
||||
if strings.HasPrefix(ref, "urn:") {
|
||||
return ref, nil
|
||||
}
|
||||
|
||||
refURL, err := url.Parse(ref)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if refURL.IsAbs() {
|
||||
return ref, nil
|
||||
}
|
||||
|
||||
if strings.HasPrefix(base, "urn:") {
|
||||
base, _ = split(base)
|
||||
return base + ref, nil
|
||||
}
|
||||
|
||||
baseURL, err := url.Parse(base)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return baseURL.ResolveReference(refURL).String(), nil
|
||||
}
|
||||
|
||||
func split(uri string) (string, string) {
|
||||
hash := strings.IndexByte(uri, '#')
|
||||
if hash == -1 {
|
||||
return uri, "#"
|
||||
}
|
||||
f := uri[hash:]
|
||||
if f == "#/" {
|
||||
f = "#"
|
||||
}
|
||||
return uri[0:hash], f
|
||||
}
|
||||
|
||||
func (s *Schema) url() string {
|
||||
u, _ := split(s.Location)
|
||||
return u
|
||||
}
|
||||
|
||||
func (s *Schema) loc() string {
|
||||
_, f := split(s.Location)
|
||||
return f[1:]
|
||||
}
|
||||
|
||||
func unmarshal(r io.Reader) (interface{}, error) {
|
||||
decoder := json.NewDecoder(r)
|
||||
decoder.UseNumber()
|
||||
var doc interface{}
|
||||
if err := decoder.Decode(&doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if t, _ := decoder.Token(); t != nil {
|
||||
return nil, fmt.Errorf("invalid character %v after top-level value", t)
|
||||
}
|
||||
return doc, nil
|
||||
}
|
||||
900
vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
generated
vendored
900
vendor/github.com/santhosh-tekuri/jsonschema/v5/schema.go
generated
vendored
|
|
@ -1,900 +0,0 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"math/big"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// A Schema represents compiled version of json-schema.
|
||||
type Schema struct {
|
||||
Location string // absolute location
|
||||
|
||||
Draft *Draft // draft used by schema.
|
||||
meta *Schema
|
||||
vocab []string
|
||||
dynamicAnchors []*Schema
|
||||
|
||||
// type agnostic validations
|
||||
Format string
|
||||
format func(interface{}) bool
|
||||
Always *bool // always pass/fail. used when booleans are used as schemas in draft-07.
|
||||
Ref *Schema
|
||||
RecursiveAnchor bool
|
||||
RecursiveRef *Schema
|
||||
DynamicAnchor string
|
||||
DynamicRef *Schema
|
||||
dynamicRefAnchor string
|
||||
Types []string // allowed types.
|
||||
Constant []interface{} // first element in slice is constant value. note: slice is used to capture nil constant.
|
||||
Enum []interface{} // allowed values.
|
||||
enumError string // error message for enum fail. captured here to avoid constructing error message every time.
|
||||
Not *Schema
|
||||
AllOf []*Schema
|
||||
AnyOf []*Schema
|
||||
OneOf []*Schema
|
||||
If *Schema
|
||||
Then *Schema // nil, when If is nil.
|
||||
Else *Schema // nil, when If is nil.
|
||||
|
||||
// object validations
|
||||
MinProperties int // -1 if not specified.
|
||||
MaxProperties int // -1 if not specified.
|
||||
Required []string // list of required properties.
|
||||
Properties map[string]*Schema
|
||||
PropertyNames *Schema
|
||||
RegexProperties bool // property names must be valid regex. used only in draft4 as workaround in metaschema.
|
||||
PatternProperties map[*regexp.Regexp]*Schema
|
||||
AdditionalProperties interface{} // nil or bool or *Schema.
|
||||
Dependencies map[string]interface{} // map value is *Schema or []string.
|
||||
DependentRequired map[string][]string
|
||||
DependentSchemas map[string]*Schema
|
||||
UnevaluatedProperties *Schema
|
||||
|
||||
// array validations
|
||||
MinItems int // -1 if not specified.
|
||||
MaxItems int // -1 if not specified.
|
||||
UniqueItems bool
|
||||
Items interface{} // nil or *Schema or []*Schema
|
||||
AdditionalItems interface{} // nil or bool or *Schema.
|
||||
PrefixItems []*Schema
|
||||
Items2020 *Schema // items keyword reintroduced in draft 2020-12
|
||||
Contains *Schema
|
||||
ContainsEval bool // whether any item in an array that passes validation of the contains schema is considered "evaluated"
|
||||
MinContains int // 1 if not specified
|
||||
MaxContains int // -1 if not specified
|
||||
UnevaluatedItems *Schema
|
||||
|
||||
// string validations
|
||||
MinLength int // -1 if not specified.
|
||||
MaxLength int // -1 if not specified.
|
||||
Pattern *regexp.Regexp
|
||||
ContentEncoding string
|
||||
decoder func(string) ([]byte, error)
|
||||
ContentMediaType string
|
||||
mediaType func([]byte) error
|
||||
ContentSchema *Schema
|
||||
|
||||
// number validators
|
||||
Minimum *big.Rat
|
||||
ExclusiveMinimum *big.Rat
|
||||
Maximum *big.Rat
|
||||
ExclusiveMaximum *big.Rat
|
||||
MultipleOf *big.Rat
|
||||
|
||||
// annotations. captured only when Compiler.ExtractAnnotations is true.
|
||||
Title string
|
||||
Description string
|
||||
Default interface{}
|
||||
Comment string
|
||||
ReadOnly bool
|
||||
WriteOnly bool
|
||||
Examples []interface{}
|
||||
Deprecated bool
|
||||
|
||||
// user defined extensions
|
||||
Extensions map[string]ExtSchema
|
||||
}
|
||||
|
||||
func (s *Schema) String() string {
|
||||
return s.Location
|
||||
}
|
||||
|
||||
func newSchema(url, floc string, draft *Draft, doc interface{}) *Schema {
|
||||
// fill with default values
|
||||
s := &Schema{
|
||||
Location: url + floc,
|
||||
Draft: draft,
|
||||
MinProperties: -1,
|
||||
MaxProperties: -1,
|
||||
MinItems: -1,
|
||||
MaxItems: -1,
|
||||
MinContains: 1,
|
||||
MaxContains: -1,
|
||||
MinLength: -1,
|
||||
MaxLength: -1,
|
||||
}
|
||||
|
||||
if doc, ok := doc.(map[string]interface{}); ok {
|
||||
if ra, ok := doc["$recursiveAnchor"]; ok {
|
||||
if ra, ok := ra.(bool); ok {
|
||||
s.RecursiveAnchor = ra
|
||||
}
|
||||
}
|
||||
if da, ok := doc["$dynamicAnchor"]; ok {
|
||||
if da, ok := da.(string); ok {
|
||||
s.DynamicAnchor = da
|
||||
}
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *Schema) hasVocab(name string) bool {
|
||||
if s == nil { // during bootstrap
|
||||
return true
|
||||
}
|
||||
if name == "core" {
|
||||
return true
|
||||
}
|
||||
for _, url := range s.vocab {
|
||||
if url == "https://json-schema.org/draft/2019-09/vocab/"+name {
|
||||
return true
|
||||
}
|
||||
if url == "https://json-schema.org/draft/2020-12/vocab/"+name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Validate validates given doc, against the json-schema s.
|
||||
//
|
||||
// the v must be the raw json value. for number precision
|
||||
// unmarshal with json.UseNumber().
|
||||
//
|
||||
// returns *ValidationError if v does not confirm with schema s.
|
||||
// returns InfiniteLoopError if it detects loop during validation.
|
||||
// returns InvalidJSONTypeError if it detects any non json value in v.
|
||||
func (s *Schema) Validate(v interface{}) (err error) {
|
||||
return s.validateValue(v, "")
|
||||
}
|
||||
|
||||
func (s *Schema) validateValue(v interface{}, vloc string) (err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
switch r := r.(type) {
|
||||
case InfiniteLoopError, InvalidJSONTypeError:
|
||||
err = r.(error)
|
||||
default:
|
||||
panic(r)
|
||||
}
|
||||
}
|
||||
}()
|
||||
if _, err := s.validate(nil, 0, "", v, vloc); err != nil {
|
||||
ve := ValidationError{
|
||||
KeywordLocation: "",
|
||||
AbsoluteKeywordLocation: s.Location,
|
||||
InstanceLocation: vloc,
|
||||
Message: fmt.Sprintf("doesn't validate with %s", s.Location),
|
||||
}
|
||||
return ve.causes(err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// validate validates given value v with this schema.
|
||||
func (s *Schema) validate(scope []schemaRef, vscope int, spath string, v interface{}, vloc string) (result validationResult, err error) {
|
||||
validationError := func(keywordPath string, format string, a ...interface{}) *ValidationError {
|
||||
return &ValidationError{
|
||||
KeywordLocation: keywordLocation(scope, keywordPath),
|
||||
AbsoluteKeywordLocation: joinPtr(s.Location, keywordPath),
|
||||
InstanceLocation: vloc,
|
||||
Message: fmt.Sprintf(format, a...),
|
||||
}
|
||||
}
|
||||
|
||||
sref := schemaRef{spath, s, false}
|
||||
if err := checkLoop(scope[len(scope)-vscope:], sref); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
scope = append(scope, sref)
|
||||
vscope++
|
||||
|
||||
// populate result
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
result.unevalProps = make(map[string]struct{})
|
||||
for pname := range v {
|
||||
result.unevalProps[pname] = struct{}{}
|
||||
}
|
||||
case []interface{}:
|
||||
result.unevalItems = make(map[int]struct{})
|
||||
for i := range v {
|
||||
result.unevalItems[i] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
validate := func(sch *Schema, schPath string, v interface{}, vpath string) error {
|
||||
vloc := vloc
|
||||
if vpath != "" {
|
||||
vloc += "/" + vpath
|
||||
}
|
||||
_, err := sch.validate(scope, 0, schPath, v, vloc)
|
||||
return err
|
||||
}
|
||||
|
||||
validateInplace := func(sch *Schema, schPath string) error {
|
||||
vr, err := sch.validate(scope, vscope, schPath, v, vloc)
|
||||
if err == nil {
|
||||
// update result
|
||||
for pname := range result.unevalProps {
|
||||
if _, ok := vr.unevalProps[pname]; !ok {
|
||||
delete(result.unevalProps, pname)
|
||||
}
|
||||
}
|
||||
for i := range result.unevalItems {
|
||||
if _, ok := vr.unevalItems[i]; !ok {
|
||||
delete(result.unevalItems, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
if s.Always != nil {
|
||||
if !*s.Always {
|
||||
return result, validationError("", "not allowed")
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
if len(s.Types) > 0 {
|
||||
vType := jsonType(v)
|
||||
matched := false
|
||||
for _, t := range s.Types {
|
||||
if vType == t {
|
||||
matched = true
|
||||
break
|
||||
} else if t == "integer" && vType == "number" {
|
||||
num, _ := new(big.Rat).SetString(fmt.Sprint(v))
|
||||
if num.IsInt() {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return result, validationError("type", "expected %s, but got %s", strings.Join(s.Types, " or "), vType)
|
||||
}
|
||||
}
|
||||
|
||||
var errors []error
|
||||
|
||||
if len(s.Constant) > 0 {
|
||||
if !equals(v, s.Constant[0]) {
|
||||
switch jsonType(s.Constant[0]) {
|
||||
case "object", "array":
|
||||
errors = append(errors, validationError("const", "const failed"))
|
||||
default:
|
||||
errors = append(errors, validationError("const", "value must be %#v", s.Constant[0]))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.Enum) > 0 {
|
||||
matched := false
|
||||
for _, item := range s.Enum {
|
||||
if equals(v, item) {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
errors = append(errors, validationError("enum", s.enumError))
|
||||
}
|
||||
}
|
||||
|
||||
if s.format != nil && !s.format(v) {
|
||||
var val = v
|
||||
if v, ok := v.(string); ok {
|
||||
val = quote(v)
|
||||
}
|
||||
errors = append(errors, validationError("format", "%v is not valid %s", val, quote(s.Format)))
|
||||
}
|
||||
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
if s.MinProperties != -1 && len(v) < s.MinProperties {
|
||||
errors = append(errors, validationError("minProperties", "minimum %d properties allowed, but found %d properties", s.MinProperties, len(v)))
|
||||
}
|
||||
if s.MaxProperties != -1 && len(v) > s.MaxProperties {
|
||||
errors = append(errors, validationError("maxProperties", "maximum %d properties allowed, but found %d properties", s.MaxProperties, len(v)))
|
||||
}
|
||||
if len(s.Required) > 0 {
|
||||
var missing []string
|
||||
for _, pname := range s.Required {
|
||||
if _, ok := v[pname]; !ok {
|
||||
missing = append(missing, quote(pname))
|
||||
}
|
||||
}
|
||||
if len(missing) > 0 {
|
||||
errors = append(errors, validationError("required", "missing properties: %s", strings.Join(missing, ", ")))
|
||||
}
|
||||
}
|
||||
|
||||
for pname, sch := range s.Properties {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
delete(result.unevalProps, pname)
|
||||
if err := validate(sch, "properties/"+escape(pname), pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.PropertyNames != nil {
|
||||
for pname := range v {
|
||||
if err := validate(s.PropertyNames, "propertyNames", pname, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.RegexProperties {
|
||||
for pname := range v {
|
||||
if !isRegex(pname) {
|
||||
errors = append(errors, validationError("", "patternProperty %s is not valid regex", quote(pname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
for pattern, sch := range s.PatternProperties {
|
||||
for pname, pvalue := range v {
|
||||
if pattern.MatchString(pname) {
|
||||
delete(result.unevalProps, pname)
|
||||
if err := validate(sch, "patternProperties/"+escape(pattern.String()), pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if s.AdditionalProperties != nil {
|
||||
if allowed, ok := s.AdditionalProperties.(bool); ok {
|
||||
if !allowed && len(result.unevalProps) > 0 {
|
||||
errors = append(errors, validationError("additionalProperties", "additionalProperties %s not allowed", result.unevalPnames()))
|
||||
}
|
||||
} else {
|
||||
schema := s.AdditionalProperties.(*Schema)
|
||||
for pname := range result.unevalProps {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
if err := validate(schema, "additionalProperties", pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
result.unevalProps = nil
|
||||
}
|
||||
for dname, dvalue := range s.Dependencies {
|
||||
if _, ok := v[dname]; ok {
|
||||
switch dvalue := dvalue.(type) {
|
||||
case *Schema:
|
||||
if err := validateInplace(dvalue, "dependencies/"+escape(dname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
case []string:
|
||||
for i, pname := range dvalue {
|
||||
if _, ok := v[pname]; !ok {
|
||||
errors = append(errors, validationError("dependencies/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for dname, dvalue := range s.DependentRequired {
|
||||
if _, ok := v[dname]; ok {
|
||||
for i, pname := range dvalue {
|
||||
if _, ok := v[pname]; !ok {
|
||||
errors = append(errors, validationError("dependentRequired/"+escape(dname)+"/"+strconv.Itoa(i), "property %s is required, if %s property exists", quote(pname), quote(dname)))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for dname, sch := range s.DependentSchemas {
|
||||
if _, ok := v[dname]; ok {
|
||||
if err := validateInplace(sch, "dependentSchemas/"+escape(dname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case []interface{}:
|
||||
if s.MinItems != -1 && len(v) < s.MinItems {
|
||||
errors = append(errors, validationError("minItems", "minimum %d items required, but found %d items", s.MinItems, len(v)))
|
||||
}
|
||||
if s.MaxItems != -1 && len(v) > s.MaxItems {
|
||||
errors = append(errors, validationError("maxItems", "maximum %d items required, but found %d items", s.MaxItems, len(v)))
|
||||
}
|
||||
if s.UniqueItems {
|
||||
if len(v) <= 20 {
|
||||
outer1:
|
||||
for i := 1; i < len(v); i++ {
|
||||
for j := 0; j < i; j++ {
|
||||
if equals(v[i], v[j]) {
|
||||
errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
|
||||
break outer1
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
m := make(map[uint64][]int)
|
||||
var h maphash.Hash
|
||||
outer2:
|
||||
for i, item := range v {
|
||||
h.Reset()
|
||||
hash(item, &h)
|
||||
k := h.Sum64()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
arr, ok := m[k]
|
||||
if ok {
|
||||
for _, j := range arr {
|
||||
if equals(v[j], item) {
|
||||
errors = append(errors, validationError("uniqueItems", "items at index %d and %d are equal", j, i))
|
||||
break outer2
|
||||
}
|
||||
}
|
||||
}
|
||||
arr = append(arr, i)
|
||||
m[k] = arr
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// items + additionalItems
|
||||
switch items := s.Items.(type) {
|
||||
case *Schema:
|
||||
for i, item := range v {
|
||||
if err := validate(items, "items", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
result.unevalItems = nil
|
||||
case []*Schema:
|
||||
for i, item := range v {
|
||||
if i < len(items) {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(items[i], "items/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else if sch, ok := s.AdditionalItems.(*Schema); ok {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(sch, "additionalItems", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if additionalItems, ok := s.AdditionalItems.(bool); ok {
|
||||
if additionalItems {
|
||||
result.unevalItems = nil
|
||||
} else if len(v) > len(items) {
|
||||
errors = append(errors, validationError("additionalItems", "only %d items are allowed, but found %d items", len(items), len(v)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// prefixItems + items
|
||||
for i, item := range v {
|
||||
if i < len(s.PrefixItems) {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(s.PrefixItems[i], "prefixItems/"+strconv.Itoa(i), item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else if s.Items2020 != nil {
|
||||
delete(result.unevalItems, i)
|
||||
if err := validate(s.Items2020, "items", item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// contains + minContains + maxContains
|
||||
if s.Contains != nil && (s.MinContains != -1 || s.MaxContains != -1) {
|
||||
matched := 0
|
||||
var causes []error
|
||||
for i, item := range v {
|
||||
if err := validate(s.Contains, "contains", item, strconv.Itoa(i)); err != nil {
|
||||
causes = append(causes, err)
|
||||
} else {
|
||||
matched++
|
||||
if s.ContainsEval {
|
||||
delete(result.unevalItems, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
if s.MinContains != -1 && matched < s.MinContains {
|
||||
errors = append(errors, validationError("minContains", "valid must be >= %d, but got %d", s.MinContains, matched).add(causes...))
|
||||
}
|
||||
if s.MaxContains != -1 && matched > s.MaxContains {
|
||||
errors = append(errors, validationError("maxContains", "valid must be <= %d, but got %d", s.MaxContains, matched))
|
||||
}
|
||||
}
|
||||
|
||||
case string:
|
||||
// minLength + maxLength
|
||||
if s.MinLength != -1 || s.MaxLength != -1 {
|
||||
length := utf8.RuneCount([]byte(v))
|
||||
if s.MinLength != -1 && length < s.MinLength {
|
||||
errors = append(errors, validationError("minLength", "length must be >= %d, but got %d", s.MinLength, length))
|
||||
}
|
||||
if s.MaxLength != -1 && length > s.MaxLength {
|
||||
errors = append(errors, validationError("maxLength", "length must be <= %d, but got %d", s.MaxLength, length))
|
||||
}
|
||||
}
|
||||
|
||||
if s.Pattern != nil && !s.Pattern.MatchString(v) {
|
||||
errors = append(errors, validationError("pattern", "does not match pattern %s", quote(s.Pattern.String())))
|
||||
}
|
||||
|
||||
// contentEncoding + contentMediaType
|
||||
if s.decoder != nil || s.mediaType != nil {
|
||||
decoded := s.ContentEncoding == ""
|
||||
var content []byte
|
||||
if s.decoder != nil {
|
||||
b, err := s.decoder(v)
|
||||
if err != nil {
|
||||
errors = append(errors, validationError("contentEncoding", "value is not %s encoded", s.ContentEncoding))
|
||||
} else {
|
||||
content, decoded = b, true
|
||||
}
|
||||
}
|
||||
if decoded && s.mediaType != nil {
|
||||
if s.decoder == nil {
|
||||
content = []byte(v)
|
||||
}
|
||||
if err := s.mediaType(content); err != nil {
|
||||
errors = append(errors, validationError("contentMediaType", "value is not of mediatype %s", quote(s.ContentMediaType)))
|
||||
}
|
||||
}
|
||||
if decoded && s.ContentSchema != nil {
|
||||
contentJSON, err := unmarshal(bytes.NewReader(content))
|
||||
if err != nil {
|
||||
errors = append(errors, validationError("contentSchema", "value is not valid json"))
|
||||
} else {
|
||||
err := validate(s.ContentSchema, "contentSchema", contentJSON, "")
|
||||
if err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
|
||||
// lazy convert to *big.Rat to avoid allocation
|
||||
var numVal *big.Rat
|
||||
num := func() *big.Rat {
|
||||
if numVal == nil {
|
||||
numVal, _ = new(big.Rat).SetString(fmt.Sprint(v))
|
||||
}
|
||||
return numVal
|
||||
}
|
||||
f64 := func(r *big.Rat) float64 {
|
||||
f, _ := r.Float64()
|
||||
return f
|
||||
}
|
||||
if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
|
||||
errors = append(errors, validationError("minimum", "must be >= %v but found %v", f64(s.Minimum), v))
|
||||
}
|
||||
if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
|
||||
errors = append(errors, validationError("exclusiveMinimum", "must be > %v but found %v", f64(s.ExclusiveMinimum), v))
|
||||
}
|
||||
if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
|
||||
errors = append(errors, validationError("maximum", "must be <= %v but found %v", f64(s.Maximum), v))
|
||||
}
|
||||
if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
|
||||
errors = append(errors, validationError("exclusiveMaximum", "must be < %v but found %v", f64(s.ExclusiveMaximum), v))
|
||||
}
|
||||
if s.MultipleOf != nil {
|
||||
if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
|
||||
errors = append(errors, validationError("multipleOf", "%v not multipleOf %v", v, f64(s.MultipleOf)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// $ref + $recursiveRef + $dynamicRef
|
||||
validateRef := func(sch *Schema, refPath string) error {
|
||||
if sch != nil {
|
||||
if err := validateInplace(sch, refPath); err != nil {
|
||||
var url = sch.Location
|
||||
if s.url() == sch.url() {
|
||||
url = sch.loc()
|
||||
}
|
||||
return validationError(refPath, "doesn't validate with %s", quote(url)).causes(err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := validateRef(s.Ref, "$ref"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
if s.RecursiveRef != nil {
|
||||
sch := s.RecursiveRef
|
||||
if sch.RecursiveAnchor {
|
||||
// recursiveRef based on scope
|
||||
for _, e := range scope {
|
||||
if e.schema.RecursiveAnchor {
|
||||
sch = e.schema
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := validateRef(sch, "$recursiveRef"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
if s.DynamicRef != nil {
|
||||
sch := s.DynamicRef
|
||||
if s.dynamicRefAnchor != "" && sch.DynamicAnchor == s.dynamicRefAnchor {
|
||||
// dynamicRef based on scope
|
||||
for i := len(scope) - 1; i >= 0; i-- {
|
||||
sr := scope[i]
|
||||
if sr.discard {
|
||||
break
|
||||
}
|
||||
for _, da := range sr.schema.dynamicAnchors {
|
||||
if da.DynamicAnchor == s.DynamicRef.DynamicAnchor && da != s.DynamicRef {
|
||||
sch = da
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if err := validateRef(sch, "$dynamicRef"); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
|
||||
if s.Not != nil && validateInplace(s.Not, "not") == nil {
|
||||
errors = append(errors, validationError("not", "not failed"))
|
||||
}
|
||||
|
||||
for i, sch := range s.AllOf {
|
||||
schPath := "allOf/" + strconv.Itoa(i)
|
||||
if err := validateInplace(sch, schPath); err != nil {
|
||||
errors = append(errors, validationError(schPath, "allOf failed").add(err))
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.AnyOf) > 0 {
|
||||
matched := false
|
||||
var causes []error
|
||||
for i, sch := range s.AnyOf {
|
||||
if err := validateInplace(sch, "anyOf/"+strconv.Itoa(i)); err == nil {
|
||||
matched = true
|
||||
} else {
|
||||
causes = append(causes, err)
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
errors = append(errors, validationError("anyOf", "anyOf failed").add(causes...))
|
||||
}
|
||||
}
|
||||
|
||||
if len(s.OneOf) > 0 {
|
||||
matched := -1
|
||||
var causes []error
|
||||
for i, sch := range s.OneOf {
|
||||
if err := validateInplace(sch, "oneOf/"+strconv.Itoa(i)); err == nil {
|
||||
if matched == -1 {
|
||||
matched = i
|
||||
} else {
|
||||
errors = append(errors, validationError("oneOf", "valid against schemas at indexes %d and %d", matched, i))
|
||||
break
|
||||
}
|
||||
} else {
|
||||
causes = append(causes, err)
|
||||
}
|
||||
}
|
||||
if matched == -1 {
|
||||
errors = append(errors, validationError("oneOf", "oneOf failed").add(causes...))
|
||||
}
|
||||
}
|
||||
|
||||
// if + then + else
|
||||
if s.If != nil {
|
||||
err := validateInplace(s.If, "if")
|
||||
// "if" leaves dynamic scope
|
||||
scope[len(scope)-1].discard = true
|
||||
if err == nil {
|
||||
if s.Then != nil {
|
||||
if err := validateInplace(s.Then, "then"); err != nil {
|
||||
errors = append(errors, validationError("then", "if-then failed").add(err))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if s.Else != nil {
|
||||
if err := validateInplace(s.Else, "else"); err != nil {
|
||||
errors = append(errors, validationError("else", "if-else failed").add(err))
|
||||
}
|
||||
}
|
||||
}
|
||||
// restore dynamic scope
|
||||
scope[len(scope)-1].discard = false
|
||||
}
|
||||
|
||||
for _, ext := range s.Extensions {
|
||||
if err := ext.Validate(ValidationContext{result, validate, validateInplace, validationError}, v); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
|
||||
// unevaluatedProperties + unevaluatedItems
|
||||
switch v := v.(type) {
|
||||
case map[string]interface{}:
|
||||
if s.UnevaluatedProperties != nil {
|
||||
for pname := range result.unevalProps {
|
||||
if pvalue, ok := v[pname]; ok {
|
||||
if err := validate(s.UnevaluatedProperties, "unevaluatedProperties", pvalue, escape(pname)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
result.unevalProps = nil
|
||||
}
|
||||
case []interface{}:
|
||||
if s.UnevaluatedItems != nil {
|
||||
for i := range result.unevalItems {
|
||||
if err := validate(s.UnevaluatedItems, "unevaluatedItems", v[i], strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
}
|
||||
result.unevalItems = nil
|
||||
}
|
||||
}
|
||||
|
||||
switch len(errors) {
|
||||
case 0:
|
||||
return result, nil
|
||||
case 1:
|
||||
return result, errors[0]
|
||||
default:
|
||||
return result, validationError("", "").add(errors...) // empty message, used just for wrapping
|
||||
}
|
||||
}
|
||||
|
||||
type validationResult struct {
|
||||
unevalProps map[string]struct{}
|
||||
unevalItems map[int]struct{}
|
||||
}
|
||||
|
||||
func (vr validationResult) unevalPnames() string {
|
||||
pnames := make([]string, 0, len(vr.unevalProps))
|
||||
for pname := range vr.unevalProps {
|
||||
pnames = append(pnames, quote(pname))
|
||||
}
|
||||
return strings.Join(pnames, ", ")
|
||||
}
|
||||
|
||||
// jsonType returns the json type of given value v.
|
||||
//
|
||||
// It panics if the given value is not valid json value
|
||||
func jsonType(v interface{}) string {
|
||||
switch v.(type) {
|
||||
case nil:
|
||||
return "null"
|
||||
case bool:
|
||||
return "boolean"
|
||||
case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
|
||||
return "number"
|
||||
case string:
|
||||
return "string"
|
||||
case []interface{}:
|
||||
return "array"
|
||||
case map[string]interface{}:
|
||||
return "object"
|
||||
}
|
||||
panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
|
||||
}
|
||||
|
||||
// equals tells if given two json values are equal or not.
|
||||
func equals(v1, v2 interface{}) bool {
|
||||
v1Type := jsonType(v1)
|
||||
if v1Type != jsonType(v2) {
|
||||
return false
|
||||
}
|
||||
switch v1Type {
|
||||
case "array":
|
||||
arr1, arr2 := v1.([]interface{}), v2.([]interface{})
|
||||
if len(arr1) != len(arr2) {
|
||||
return false
|
||||
}
|
||||
for i := range arr1 {
|
||||
if !equals(arr1[i], arr2[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case "object":
|
||||
obj1, obj2 := v1.(map[string]interface{}), v2.(map[string]interface{})
|
||||
if len(obj1) != len(obj2) {
|
||||
return false
|
||||
}
|
||||
for k, v1 := range obj1 {
|
||||
if v2, ok := obj2[k]; ok {
|
||||
if !equals(v1, v2) {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
case "number":
|
||||
num1, _ := new(big.Rat).SetString(fmt.Sprint(v1))
|
||||
num2, _ := new(big.Rat).SetString(fmt.Sprint(v2))
|
||||
return num1.Cmp(num2) == 0
|
||||
default:
|
||||
return v1 == v2
|
||||
}
|
||||
}
|
||||
|
||||
func hash(v interface{}, h *maphash.Hash) {
|
||||
switch v := v.(type) {
|
||||
case nil:
|
||||
h.WriteByte(0)
|
||||
case bool:
|
||||
h.WriteByte(1)
|
||||
if v {
|
||||
h.WriteByte(1)
|
||||
} else {
|
||||
h.WriteByte(0)
|
||||
}
|
||||
case json.Number, float32, float64, int, int8, int32, int64, uint, uint8, uint32, uint64:
|
||||
h.WriteByte(2)
|
||||
num, _ := new(big.Rat).SetString(fmt.Sprint(v))
|
||||
h.Write(num.Num().Bytes())
|
||||
h.Write(num.Denom().Bytes())
|
||||
case string:
|
||||
h.WriteByte(3)
|
||||
h.WriteString(v)
|
||||
case []interface{}:
|
||||
h.WriteByte(4)
|
||||
for _, item := range v {
|
||||
hash(item, h)
|
||||
}
|
||||
case map[string]interface{}:
|
||||
h.WriteByte(5)
|
||||
props := make([]string, 0, len(v))
|
||||
for prop := range v {
|
||||
props = append(props, prop)
|
||||
}
|
||||
sort.Slice(props, func(i, j int) bool {
|
||||
return props[i] < props[j]
|
||||
})
|
||||
for _, prop := range props {
|
||||
hash(prop, h)
|
||||
hash(v[prop], h)
|
||||
}
|
||||
default:
|
||||
panic(InvalidJSONTypeError(fmt.Sprintf("%T", v)))
|
||||
}
|
||||
}
|
||||
|
||||
// escape converts given token to valid json-pointer token
|
||||
func escape(token string) string {
|
||||
token = strings.ReplaceAll(token, "~", "~0")
|
||||
token = strings.ReplaceAll(token, "/", "~1")
|
||||
return url.PathEscape(token)
|
||||
}
|
||||
|
|
@ -1,3 +1,4 @@
|
|||
[submodule "testdata/JSON-Schema-Test-Suite"]
|
||||
path = testdata/JSON-Schema-Test-Suite
|
||||
url = https://github.com/json-schema-org/JSON-Schema-Test-Suite.git
|
||||
branch = main
|
||||
5
vendor/github.com/santhosh-tekuri/jsonschema/v6/.golangci.yml
generated
vendored
Normal file
5
vendor/github.com/santhosh-tekuri/jsonschema/v6/.golangci.yml
generated
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
linters:
|
||||
enable:
|
||||
- nakedret
|
||||
- errname
|
||||
- godot
|
||||
7
vendor/github.com/santhosh-tekuri/jsonschema/v6/.pre-commit-hooks.yaml
generated
vendored
Normal file
7
vendor/github.com/santhosh-tekuri/jsonschema/v6/.pre-commit-hooks.yaml
generated
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
- id: jsonschema-validate
|
||||
name: Validate JSON against JSON Schema
|
||||
description: ensure json files follow specified JSON Schema
|
||||
entry: jv
|
||||
language: golang
|
||||
additional_dependencies:
|
||||
- ./cmd/jv
|
||||
86
vendor/github.com/santhosh-tekuri/jsonschema/v6/README.md
generated
vendored
Normal file
86
vendor/github.com/santhosh-tekuri/jsonschema/v6/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
# jsonschema v6.0.0
|
||||
|
||||
[](https://opensource.org/licenses/Apache-2.0)
|
||||
[](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v6)
|
||||
[](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema/v6)
|
||||
[](https://github.com/santhosh-tekuri/jsonschema/actions/workflows/go.yaml)
|
||||
[](https://codecov.io/gh/santhosh-tekuri/jsonschema/tree/boon)
|
||||
|
||||
see [godoc](https://pkg.go.dev/github.com/santhosh-tekuri/jsonschema/v6) for examples
|
||||
|
||||
## Library Features
|
||||
|
||||
- [x] pass [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite) excluding optional(compare with other impls at [bowtie](https://bowtie-json-schema.github.io/bowtie/#))
|
||||
- [x] [](https://bowtie.report/#/dialects/draft4)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft6)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft7)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft2019-09)
|
||||
- [x] [](https://bowtie.report/#/dialects/draft2020-12)
|
||||
- [x] detect infinite loop traps
|
||||
- [x] `$schema` cycle
|
||||
- [x] validation cycle
|
||||
- [x] custom `$schema` url
|
||||
- [x] vocabulary based validation
|
||||
- [x] custom regex engine
|
||||
- [x] format assertions
|
||||
- [x] flag to enable in draft >= 2019-09
|
||||
- [x] custom format registration
|
||||
- [x] built-in formats
|
||||
- [x] regex, uuid
|
||||
- [x] ipv4, ipv6
|
||||
- [x] hostname, email
|
||||
- [x] date, time, date-time, duration
|
||||
- [x] json-pointer, relative-json-pointer
|
||||
- [x] uri, uri-reference, uri-template
|
||||
- [x] iri, iri-reference
|
||||
- [x] period, semver
|
||||
- [x] content assertions
|
||||
- [x] flag to enable in draft >= 7
|
||||
- [x] contentEncoding
|
||||
- [x] base64
|
||||
- [x] custom
|
||||
- [x] contentMediaType
|
||||
- [x] application/json
|
||||
- [x] custom
|
||||
- [x] contentSchema
|
||||
- [x] errors
|
||||
- [x] introspectable
|
||||
- [x] hierarchy
|
||||
- [x] alternative display with `#`
|
||||
- [x] output
|
||||
- [x] flag
|
||||
- [x] basic
|
||||
- [x] detailed
|
||||
- [x] custom vocabulary
|
||||
- enable via `$vocabulary` for draft >=2019-19
|
||||
- enable via flag for draft <= 7
|
||||
- [x] mixed dialect support
|
||||
|
||||
## CLI
|
||||
|
||||
to install: `go install github.com/santhosh-tekuri/jsonschema/cmd/jv@latest`
|
||||
|
||||
```
|
||||
Usage: jv [OPTIONS] SCHEMA [INSTANCE...]
|
||||
|
||||
Options:
|
||||
-c, --assert-content Enable content assertions with draft >= 7
|
||||
-f, --assert-format Enable format assertions with draft >= 2019
|
||||
--cacert pem-file Use the specified pem-file to verify the peer. The file may contain multiple CA certificates
|
||||
-d, --draft version Draft version used when '$schema' is missing. Valid values 4, 6, 7, 2019, 2020 (default 2020)
|
||||
-h, --help Print help information
|
||||
-k, --insecure Use insecure TLS connection
|
||||
-o, --output format Output format. Valid values simple, alt, flag, basic, detailed (default "simple")
|
||||
-q, --quiet Do not print errors
|
||||
-v, --version Print build information
|
||||
```
|
||||
|
||||
- [x] exit code `1` for validation erros, `2` for usage errors
|
||||
- [x] validate both schema and multiple instances
|
||||
- [x] support both json and yaml files
|
||||
- [x] support standard input, use `-`
|
||||
- [x] quite mode with parsable output
|
||||
- [x] http(s) url support
|
||||
- [x] custom certs for validation, use `--cacert`
|
||||
- [x] flag to skip certificate verification, use `--insecure`
|
||||
|
||||
332
vendor/github.com/santhosh-tekuri/jsonschema/v6/compiler.go
generated
vendored
Normal file
332
vendor/github.com/santhosh-tekuri/jsonschema/v6/compiler.go
generated
vendored
Normal file
|
|
@ -0,0 +1,332 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"slices"
|
||||
)
|
||||
|
||||
// Compiler compiles json schema into *Schema.
|
||||
type Compiler struct {
|
||||
schemas map[urlPtr]*Schema
|
||||
roots *roots
|
||||
formats map[string]*Format
|
||||
decoders map[string]*Decoder
|
||||
mediaTypes map[string]*MediaType
|
||||
assertFormat bool
|
||||
assertContent bool
|
||||
}
|
||||
|
||||
// NewCompiler create Compiler Object.
|
||||
func NewCompiler() *Compiler {
|
||||
return &Compiler{
|
||||
schemas: map[urlPtr]*Schema{},
|
||||
roots: newRoots(),
|
||||
formats: map[string]*Format{},
|
||||
decoders: map[string]*Decoder{},
|
||||
mediaTypes: map[string]*MediaType{},
|
||||
assertFormat: false,
|
||||
assertContent: false,
|
||||
}
|
||||
}
|
||||
|
||||
// DefaultDraft overrides the draft used to
|
||||
// compile schemas without `$schema` field.
|
||||
//
|
||||
// By default, this library uses the latest
|
||||
// draft supported.
|
||||
//
|
||||
// The use of this option is HIGHLY encouraged
|
||||
// to ensure continued correct operation of your
|
||||
// schema. The current default value will not stay
|
||||
// the same overtime.
|
||||
func (c *Compiler) DefaultDraft(d *Draft) {
|
||||
c.roots.defaultDraft = d
|
||||
}
|
||||
|
||||
// AssertFormat always enables format assertions.
|
||||
//
|
||||
// Default Behavior:
|
||||
// for draft-07: enabled.
|
||||
// for draft/2019-09: disabled unless metaschema says `format` vocabulary is required.
|
||||
// for draft/2020-12: disabled unless metaschema says `format-assertion` vocabulary is required.
|
||||
func (c *Compiler) AssertFormat() {
|
||||
c.assertFormat = true
|
||||
}
|
||||
|
||||
// AssertContent enables content assertions.
|
||||
//
|
||||
// Content assertions include keywords:
|
||||
// - contentEncoding
|
||||
// - contentMediaType
|
||||
// - contentSchema
|
||||
//
|
||||
// Default behavior is always disabled.
|
||||
func (c *Compiler) AssertContent() {
|
||||
c.assertContent = true
|
||||
}
|
||||
|
||||
// RegisterFormat registers custom format.
|
||||
//
|
||||
// NOTE:
|
||||
// - "regex" format can not be overridden
|
||||
// - format assertions are disabled for draft >= 2019-09
|
||||
// see [Compiler.AssertFormat]
|
||||
func (c *Compiler) RegisterFormat(f *Format) {
|
||||
if f.Name != "regex" {
|
||||
c.formats[f.Name] = f
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterContentEncoding registers custom contentEncoding.
|
||||
//
|
||||
// NOTE: content assertions are disabled by default.
|
||||
// see [Compiler.AssertContent].
|
||||
func (c *Compiler) RegisterContentEncoding(d *Decoder) {
|
||||
c.decoders[d.Name] = d
|
||||
}
|
||||
|
||||
// RegisterContentMediaType registers custom contentMediaType.
|
||||
//
|
||||
// NOTE: content assertions are disabled by default.
|
||||
// see [Compiler.AssertContent].
|
||||
func (c *Compiler) RegisterContentMediaType(mt *MediaType) {
|
||||
c.mediaTypes[mt.Name] = mt
|
||||
}
|
||||
|
||||
// RegisterVocabulary registers custom vocabulary.
|
||||
//
|
||||
// NOTE:
|
||||
// - vocabularies are disabled for draft >= 2019-09
|
||||
// see [Compiler.AssertVocabs]
|
||||
func (c *Compiler) RegisterVocabulary(vocab *Vocabulary) {
|
||||
c.roots.vocabularies[vocab.URL] = vocab
|
||||
}
|
||||
|
||||
// AssertVocabs always enables user-defined vocabularies assertions.
|
||||
//
|
||||
// Default Behavior:
|
||||
// for draft-07: enabled.
|
||||
// for draft/2019-09: disabled unless metaschema enables a vocabulary.
|
||||
// for draft/2020-12: disabled unless metaschema enables a vocabulary.
|
||||
func (c *Compiler) AssertVocabs() {
|
||||
c.roots.assertVocabs = true
|
||||
}
|
||||
|
||||
// AddResource adds schema resource which gets used later in reference
|
||||
// resolution.
|
||||
//
|
||||
// The argument url can be file path or url. Any fragment in url is ignored.
|
||||
// The argument doc must be valid json value.
|
||||
func (c *Compiler) AddResource(url string, doc any) error {
|
||||
uf, err := absolute(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if isMeta(string(uf.url)) {
|
||||
return &ResourceExistsError{string(uf.url)}
|
||||
}
|
||||
if !c.roots.loader.add(uf.url, doc) {
|
||||
return &ResourceExistsError{string(uf.url)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UseLoader overrides the default [URLLoader] used
|
||||
// to load schema resources.
|
||||
func (c *Compiler) UseLoader(loader URLLoader) {
|
||||
c.roots.loader.loader = loader
|
||||
}
|
||||
|
||||
// UseRegexpEngine changes the regexp-engine used.
|
||||
// By default it uses regexp package from go standard
|
||||
// library.
|
||||
//
|
||||
// NOTE: must be called before compiling any schemas.
|
||||
func (c *Compiler) UseRegexpEngine(engine RegexpEngine) {
|
||||
if engine == nil {
|
||||
engine = goRegexpCompile
|
||||
}
|
||||
c.roots.regexpEngine = engine
|
||||
}
|
||||
|
||||
func (c *Compiler) enqueue(q *queue, up urlPtr) *Schema {
|
||||
if sch, ok := c.schemas[up]; ok {
|
||||
// already got compiled
|
||||
return sch
|
||||
}
|
||||
if sch := q.get(up); sch != nil {
|
||||
return sch
|
||||
}
|
||||
sch := newSchema(up)
|
||||
q.append(sch)
|
||||
return sch
|
||||
}
|
||||
|
||||
// MustCompile is like [Compile] but panics if compilation fails.
|
||||
// It simplifies safe initialization of global variables holding
|
||||
// compiled schema.
|
||||
func (c *Compiler) MustCompile(loc string) *Schema {
|
||||
sch, err := c.Compile(loc)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("jsonschema: Compile(%q): %v", loc, err))
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
// Compile compiles json-schema at given loc.
|
||||
func (c *Compiler) Compile(loc string) (*Schema, error) {
|
||||
uf, err := absolute(loc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
up, err := c.roots.resolveFragment(*uf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.doCompile(up)
|
||||
}
|
||||
|
||||
func (c *Compiler) doCompile(up urlPtr) (*Schema, error) {
|
||||
q := &queue{}
|
||||
compiled := 0
|
||||
|
||||
c.enqueue(q, up)
|
||||
for q.len() > compiled {
|
||||
sch := q.at(compiled)
|
||||
if err := c.roots.ensureSubschema(sch.up); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r := c.roots.roots[sch.up.url]
|
||||
v, err := sch.up.lookup(r.doc)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := c.compileValue(v, sch, r, q); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
compiled++
|
||||
}
|
||||
for _, sch := range *q {
|
||||
c.schemas[sch.up] = sch
|
||||
}
|
||||
return c.schemas[up], nil
|
||||
}
|
||||
|
||||
func (c *Compiler) compileValue(v any, sch *Schema, r *root, q *queue) error {
|
||||
res := r.resource(sch.up.ptr)
|
||||
sch.DraftVersion = res.dialect.draft.version
|
||||
|
||||
base := urlPtr{sch.up.url, res.ptr}
|
||||
sch.resource = c.enqueue(q, base)
|
||||
|
||||
// if resource, enqueue dynamic anchors for compilation
|
||||
if sch.DraftVersion >= 2020 && sch.up == sch.resource.up {
|
||||
res := r.resource(sch.up.ptr)
|
||||
for anchor, anchorPtr := range res.anchors {
|
||||
if slices.Contains(res.dynamicAnchors, anchor) {
|
||||
up := urlPtr{sch.up.url, anchorPtr}
|
||||
danchorSch := c.enqueue(q, up)
|
||||
if sch.dynamicAnchors == nil {
|
||||
sch.dynamicAnchors = map[string]*Schema{}
|
||||
}
|
||||
sch.dynamicAnchors[string(anchor)] = danchorSch
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
switch v := v.(type) {
|
||||
case bool:
|
||||
sch.Bool = &v
|
||||
case map[string]any:
|
||||
if err := c.compileObject(v, sch, r, q); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
sch.allPropsEvaluated = sch.AdditionalProperties != nil
|
||||
if sch.DraftVersion < 2020 {
|
||||
sch.allItemsEvaluated = sch.AdditionalItems != nil
|
||||
switch items := sch.Items.(type) {
|
||||
case *Schema:
|
||||
sch.allItemsEvaluated = true
|
||||
case []*Schema:
|
||||
sch.numItemsEvaluated = len(items)
|
||||
}
|
||||
} else {
|
||||
sch.allItemsEvaluated = sch.Items2020 != nil
|
||||
sch.numItemsEvaluated = len(sch.PrefixItems)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Compiler) compileObject(obj map[string]any, sch *Schema, r *root, q *queue) error {
|
||||
if len(obj) == 0 {
|
||||
b := true
|
||||
sch.Bool = &b
|
||||
return nil
|
||||
}
|
||||
oc := objCompiler{
|
||||
c: c,
|
||||
obj: obj,
|
||||
up: sch.up,
|
||||
r: r,
|
||||
res: r.resource(sch.up.ptr),
|
||||
q: q,
|
||||
}
|
||||
return oc.compile(sch)
|
||||
}
|
||||
|
||||
// queue --
|
||||
|
||||
type queue []*Schema
|
||||
|
||||
func (q *queue) append(sch *Schema) {
|
||||
*q = append(*q, sch)
|
||||
}
|
||||
|
||||
func (q *queue) at(i int) *Schema {
|
||||
return (*q)[i]
|
||||
}
|
||||
|
||||
func (q *queue) len() int {
|
||||
return len(*q)
|
||||
}
|
||||
|
||||
func (q *queue) get(up urlPtr) *Schema {
|
||||
i := slices.IndexFunc(*q, func(sch *Schema) bool { return sch.up == up })
|
||||
if i != -1 {
|
||||
return (*q)[i]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// regexp --
|
||||
|
||||
// Regexp is the representation of compiled regular expression.
|
||||
type Regexp interface {
|
||||
fmt.Stringer
|
||||
|
||||
// MatchString reports whether the string s contains
|
||||
// any match of the regular expression.
|
||||
MatchString(string) bool
|
||||
}
|
||||
|
||||
// RegexpEngine parses a regular expression and returns,
|
||||
// if successful, a Regexp object that can be used to
|
||||
// match against text.
|
||||
type RegexpEngine func(string) (Regexp, error)
|
||||
|
||||
func (re RegexpEngine) validate(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
_, err := re(s)
|
||||
return err
|
||||
}
|
||||
|
||||
func goRegexpCompile(s string) (Regexp, error) {
|
||||
return regexp.Compile(s)
|
||||
}
|
||||
51
vendor/github.com/santhosh-tekuri/jsonschema/v6/content.go
generated
vendored
Normal file
51
vendor/github.com/santhosh-tekuri/jsonschema/v6/content.go
generated
vendored
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
)
|
||||
|
||||
// Decoder specifies how to decode specific contentEncoding.
|
||||
type Decoder struct {
|
||||
// Name of contentEncoding.
|
||||
Name string
|
||||
// Decode given string to byte array.
|
||||
Decode func(string) ([]byte, error)
|
||||
}
|
||||
|
||||
var decoders = map[string]*Decoder{
|
||||
"base64": {
|
||||
Name: "base64",
|
||||
Decode: func(s string) ([]byte, error) {
|
||||
return base64.StdEncoding.DecodeString(s)
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// MediaType specified how to validate bytes against specific contentMediaType.
|
||||
type MediaType struct {
|
||||
// Name of contentMediaType.
|
||||
Name string
|
||||
|
||||
// Validate checks whether bytes conform to this mediatype.
|
||||
Validate func([]byte) error
|
||||
|
||||
// UnmarshalJSON unmarshals bytes into json value.
|
||||
// This must be nil if this mediatype is not compatible
|
||||
// with json.
|
||||
UnmarshalJSON func([]byte) (any, error)
|
||||
}
|
||||
|
||||
var mediaTypes = map[string]*MediaType{
|
||||
"application/json": {
|
||||
Name: "application/json",
|
||||
Validate: func(b []byte) error {
|
||||
var v any
|
||||
return json.Unmarshal(b, &v)
|
||||
},
|
||||
UnmarshalJSON: func(b []byte) (any, error) {
|
||||
return UnmarshalJSON(bytes.NewReader(b))
|
||||
},
|
||||
},
|
||||
}
|
||||
360
vendor/github.com/santhosh-tekuri/jsonschema/v6/draft.go
generated
vendored
Normal file
360
vendor/github.com/santhosh-tekuri/jsonschema/v6/draft.go
generated
vendored
Normal file
|
|
@ -0,0 +1,360 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A Draft represents json-schema specification.
|
||||
type Draft struct {
|
||||
version int
|
||||
url string
|
||||
sch *Schema
|
||||
id string // property name used to represent id
|
||||
subschemas []SchemaPath // locations of subschemas
|
||||
vocabPrefix string // prefix used for vocabulary
|
||||
allVocabs map[string]*Schema // names of supported vocabs with its schemas
|
||||
defaultVocabs []string // names of default vocabs
|
||||
}
|
||||
|
||||
// String returns the specification url.
|
||||
func (d *Draft) String() string {
|
||||
return d.url
|
||||
}
|
||||
|
||||
var (
|
||||
Draft4 = &Draft{
|
||||
version: 4,
|
||||
url: "http://json-schema.org/draft-04/schema",
|
||||
id: "id",
|
||||
subschemas: []SchemaPath{
|
||||
// type agonistic
|
||||
schemaPath("definitions/*"),
|
||||
schemaPath("not"),
|
||||
schemaPath("allOf/[]"),
|
||||
schemaPath("anyOf/[]"),
|
||||
schemaPath("oneOf/[]"),
|
||||
// object
|
||||
schemaPath("properties/*"),
|
||||
schemaPath("additionalProperties"),
|
||||
schemaPath("patternProperties/*"),
|
||||
// array
|
||||
schemaPath("items"),
|
||||
schemaPath("items/[]"),
|
||||
schemaPath("additionalItems"),
|
||||
schemaPath("dependencies/*"),
|
||||
},
|
||||
vocabPrefix: "",
|
||||
allVocabs: map[string]*Schema{},
|
||||
defaultVocabs: []string{},
|
||||
}
|
||||
|
||||
Draft6 = &Draft{
|
||||
version: 6,
|
||||
url: "http://json-schema.org/draft-06/schema",
|
||||
id: "$id",
|
||||
subschemas: joinSubschemas(Draft4.subschemas,
|
||||
schemaPath("propertyNames"),
|
||||
schemaPath("contains"),
|
||||
),
|
||||
vocabPrefix: "",
|
||||
allVocabs: map[string]*Schema{},
|
||||
defaultVocabs: []string{},
|
||||
}
|
||||
|
||||
Draft7 = &Draft{
|
||||
version: 7,
|
||||
url: "http://json-schema.org/draft-07/schema",
|
||||
id: "$id",
|
||||
subschemas: joinSubschemas(Draft6.subschemas,
|
||||
schemaPath("if"),
|
||||
schemaPath("then"),
|
||||
schemaPath("else"),
|
||||
),
|
||||
vocabPrefix: "",
|
||||
allVocabs: map[string]*Schema{},
|
||||
defaultVocabs: []string{},
|
||||
}
|
||||
|
||||
Draft2019 = &Draft{
|
||||
version: 2019,
|
||||
url: "https://json-schema.org/draft/2019-09/schema",
|
||||
id: "$id",
|
||||
subschemas: joinSubschemas(Draft7.subschemas,
|
||||
schemaPath("$defs/*"),
|
||||
schemaPath("dependentSchemas/*"),
|
||||
schemaPath("unevaluatedProperties"),
|
||||
schemaPath("unevaluatedItems"),
|
||||
schemaPath("contentSchema"),
|
||||
),
|
||||
vocabPrefix: "https://json-schema.org/draft/2019-09/vocab/",
|
||||
allVocabs: map[string]*Schema{
|
||||
"core": nil,
|
||||
"applicator": nil,
|
||||
"validation": nil,
|
||||
"meta-data": nil,
|
||||
"format": nil,
|
||||
"content": nil,
|
||||
},
|
||||
defaultVocabs: []string{"core", "applicator", "validation"},
|
||||
}
|
||||
|
||||
Draft2020 = &Draft{
|
||||
version: 2020,
|
||||
url: "https://json-schema.org/draft/2020-12/schema",
|
||||
id: "$id",
|
||||
subschemas: joinSubschemas(Draft2019.subschemas,
|
||||
schemaPath("prefixItems/[]"),
|
||||
),
|
||||
vocabPrefix: "https://json-schema.org/draft/2020-12/vocab/",
|
||||
allVocabs: map[string]*Schema{
|
||||
"core": nil,
|
||||
"applicator": nil,
|
||||
"unevaluated": nil,
|
||||
"validation": nil,
|
||||
"meta-data": nil,
|
||||
"format-annotation": nil,
|
||||
"format-assertion": nil,
|
||||
"content": nil,
|
||||
},
|
||||
defaultVocabs: []string{"core", "applicator", "unevaluated", "validation"},
|
||||
}
|
||||
|
||||
draftLatest = Draft2020
|
||||
)
|
||||
|
||||
func init() {
|
||||
c := NewCompiler()
|
||||
c.AssertFormat()
|
||||
for _, d := range []*Draft{Draft4, Draft6, Draft7, Draft2019, Draft2020} {
|
||||
d.sch = c.MustCompile(d.url)
|
||||
for name := range d.allVocabs {
|
||||
d.allVocabs[name] = c.MustCompile(strings.TrimSuffix(d.url, "schema") + "meta/" + name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func draftFromURL(url string) *Draft {
|
||||
u, frag := split(url)
|
||||
if frag != "" {
|
||||
return nil
|
||||
}
|
||||
u, ok := strings.CutPrefix(u, "http://")
|
||||
if !ok {
|
||||
u, _ = strings.CutPrefix(u, "https://")
|
||||
}
|
||||
switch u {
|
||||
case "json-schema.org/schema":
|
||||
return draftLatest
|
||||
case "json-schema.org/draft/2020-12/schema":
|
||||
return Draft2020
|
||||
case "json-schema.org/draft/2019-09/schema":
|
||||
return Draft2019
|
||||
case "json-schema.org/draft-07/schema":
|
||||
return Draft7
|
||||
case "json-schema.org/draft-06/schema":
|
||||
return Draft6
|
||||
case "json-schema.org/draft-04/schema":
|
||||
return Draft4
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Draft) getID(obj map[string]any) string {
|
||||
if d.version < 2019 {
|
||||
if _, ok := obj["$ref"]; ok {
|
||||
// All other properties in a "$ref" object MUST be ignored
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
id, ok := strVal(obj, d.id)
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
id, _ = split(id) // ignore fragment
|
||||
return id
|
||||
}
|
||||
|
||||
func (d *Draft) getVocabs(url url, doc any, vocabularies map[string]*Vocabulary) ([]string, error) {
|
||||
if d.version < 2019 {
|
||||
return nil, nil
|
||||
}
|
||||
obj, ok := doc.(map[string]any)
|
||||
if !ok {
|
||||
return nil, nil
|
||||
}
|
||||
v, ok := obj["$vocabulary"]
|
||||
if !ok {
|
||||
return nil, nil
|
||||
}
|
||||
obj, ok = v.(map[string]any)
|
||||
if !ok {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var vocabs []string
|
||||
for vocab, reqd := range obj {
|
||||
if reqd, ok := reqd.(bool); !ok || !reqd {
|
||||
continue
|
||||
}
|
||||
name, ok := strings.CutPrefix(vocab, d.vocabPrefix)
|
||||
if ok {
|
||||
if _, ok := d.allVocabs[name]; ok {
|
||||
if !slices.Contains(vocabs, name) {
|
||||
vocabs = append(vocabs, name)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
if _, ok := vocabularies[vocab]; !ok {
|
||||
return nil, &UnsupportedVocabularyError{url.String(), vocab}
|
||||
}
|
||||
if !slices.Contains(vocabs, vocab) {
|
||||
vocabs = append(vocabs, vocab)
|
||||
}
|
||||
}
|
||||
if !slices.Contains(vocabs, "core") {
|
||||
vocabs = append(vocabs, "core")
|
||||
}
|
||||
return vocabs, nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type dialect struct {
|
||||
draft *Draft
|
||||
vocabs []string // nil means use draft.defaultVocabs
|
||||
}
|
||||
|
||||
func (d *dialect) hasVocab(name string) bool {
|
||||
if name == "core" || d.draft.version < 2019 {
|
||||
return true
|
||||
}
|
||||
if d.vocabs != nil {
|
||||
return slices.Contains(d.vocabs, name)
|
||||
}
|
||||
return slices.Contains(d.draft.defaultVocabs, name)
|
||||
}
|
||||
|
||||
func (d *dialect) activeVocabs(assertVocabs bool, vocabularies map[string]*Vocabulary) []string {
|
||||
if len(vocabularies) == 0 {
|
||||
return d.vocabs
|
||||
}
|
||||
if d.draft.version < 2019 {
|
||||
assertVocabs = true
|
||||
}
|
||||
if !assertVocabs {
|
||||
return d.vocabs
|
||||
}
|
||||
var vocabs []string
|
||||
if d.vocabs == nil {
|
||||
vocabs = slices.Clone(d.draft.defaultVocabs)
|
||||
} else {
|
||||
vocabs = slices.Clone(d.vocabs)
|
||||
}
|
||||
for vocab := range vocabularies {
|
||||
if !slices.Contains(vocabs, vocab) {
|
||||
vocabs = append(vocabs, vocab)
|
||||
}
|
||||
}
|
||||
return vocabs
|
||||
}
|
||||
|
||||
func (d *dialect) getSchema(assertVocabs bool, vocabularies map[string]*Vocabulary) *Schema {
|
||||
vocabs := d.activeVocabs(assertVocabs, vocabularies)
|
||||
if vocabs == nil {
|
||||
return d.draft.sch
|
||||
}
|
||||
|
||||
var allOf []*Schema
|
||||
for _, vocab := range vocabs {
|
||||
sch := d.draft.allVocabs[vocab]
|
||||
if sch == nil {
|
||||
if v, ok := vocabularies[vocab]; ok {
|
||||
sch = v.Schema
|
||||
}
|
||||
}
|
||||
if sch != nil {
|
||||
allOf = append(allOf, sch)
|
||||
}
|
||||
}
|
||||
if !slices.Contains(vocabs, "core") {
|
||||
sch := d.draft.allVocabs["core"]
|
||||
if sch == nil {
|
||||
sch = d.draft.sch
|
||||
}
|
||||
allOf = append(allOf, sch)
|
||||
}
|
||||
sch := &Schema{
|
||||
Location: "urn:mem:metaschema",
|
||||
up: urlPtr{url("urn:mem:metaschema"), ""},
|
||||
DraftVersion: d.draft.version,
|
||||
AllOf: allOf,
|
||||
}
|
||||
sch.resource = sch
|
||||
if sch.DraftVersion >= 2020 {
|
||||
sch.DynamicAnchor = "meta"
|
||||
sch.dynamicAnchors = map[string]*Schema{
|
||||
"meta": sch,
|
||||
}
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ParseIDError struct {
|
||||
URL string
|
||||
}
|
||||
|
||||
func (e *ParseIDError) Error() string {
|
||||
return fmt.Sprintf("error in parsing id at %q", e.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ParseAnchorError struct {
|
||||
URL string
|
||||
}
|
||||
|
||||
func (e *ParseAnchorError) Error() string {
|
||||
return fmt.Sprintf("error in parsing anchor at %q", e.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type DuplicateIDError struct {
|
||||
ID string
|
||||
URL string
|
||||
Ptr1 string
|
||||
Ptr2 string
|
||||
}
|
||||
|
||||
func (e *DuplicateIDError) Error() string {
|
||||
return fmt.Sprintf("duplicate id %q in %q at %q and %q", e.ID, e.URL, e.Ptr1, e.Ptr2)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type DuplicateAnchorError struct {
|
||||
Anchor string
|
||||
URL string
|
||||
Ptr1 string
|
||||
Ptr2 string
|
||||
}
|
||||
|
||||
func (e *DuplicateAnchorError) Error() string {
|
||||
return fmt.Sprintf("duplicate anchor %q in %q at %q and %q", e.Anchor, e.URL, e.Ptr1, e.Ptr2)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
func joinSubschemas(a1 []SchemaPath, a2 ...SchemaPath) []SchemaPath {
|
||||
var a []SchemaPath
|
||||
a = append(a, a1...)
|
||||
a = append(a, a2...)
|
||||
return a
|
||||
}
|
||||
708
vendor/github.com/santhosh-tekuri/jsonschema/v6/format.go
generated
vendored
Normal file
708
vendor/github.com/santhosh-tekuri/jsonschema/v6/format.go
generated
vendored
Normal file
|
|
@ -0,0 +1,708 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"net/netip"
|
||||
gourl "net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Format defined specific format.
|
||||
type Format struct {
|
||||
// Name of format.
|
||||
Name string
|
||||
|
||||
// Validate checks if given value is of this format.
|
||||
Validate func(v any) error
|
||||
}
|
||||
|
||||
var formats = map[string]*Format{
|
||||
"json-pointer": {"json-pointer", validateJSONPointer},
|
||||
"relative-json-pointer": {"relative-json-pointer", validateRelativeJSONPointer},
|
||||
"uuid": {"uuid", validateUUID},
|
||||
"duration": {"duration", validateDuration},
|
||||
"period": {"period", validatePeriod},
|
||||
"ipv4": {"ipv4", validateIPV4},
|
||||
"ipv6": {"ipv6", validateIPV6},
|
||||
"hostname": {"hostname", validateHostname},
|
||||
"email": {"email", validateEmail},
|
||||
"date": {"date", validateDate},
|
||||
"time": {"time", validateTime},
|
||||
"date-time": {"date-time", validateDateTime},
|
||||
"uri": {"uri", validateURI},
|
||||
"iri": {"iri", validateURI},
|
||||
"uri-reference": {"uri-reference", validateURIReference},
|
||||
"iri-reference": {"iri-reference", validateURIReference},
|
||||
"uri-template": {"uri-template", validateURITemplate},
|
||||
"semver": {"semver", validateSemver},
|
||||
}
|
||||
|
||||
// see https://www.rfc-editor.org/rfc/rfc6901#section-3
|
||||
func validateJSONPointer(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if s == "" {
|
||||
return nil
|
||||
}
|
||||
if !strings.HasPrefix(s, "/") {
|
||||
return LocalizableError("not starting with /")
|
||||
}
|
||||
for _, tok := range strings.Split(s, "/")[1:] {
|
||||
escape := false
|
||||
for _, ch := range tok {
|
||||
if escape {
|
||||
escape = false
|
||||
if ch != '0' && ch != '1' {
|
||||
return LocalizableError("~ must be followed by 0 or 1")
|
||||
}
|
||||
continue
|
||||
}
|
||||
if ch == '~' {
|
||||
escape = true
|
||||
continue
|
||||
}
|
||||
switch {
|
||||
case ch >= '\x00' && ch <= '\x2E':
|
||||
case ch >= '\x30' && ch <= '\x7D':
|
||||
case ch >= '\x7F' && ch <= '\U0010FFFF':
|
||||
default:
|
||||
return LocalizableError("invalid character %q", ch)
|
||||
}
|
||||
}
|
||||
if escape {
|
||||
return LocalizableError("~ must be followed by 0 or 1")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
|
||||
func validateRelativeJSONPointer(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// start with non-negative-integer
|
||||
numDigits := 0
|
||||
for _, ch := range s {
|
||||
if ch >= '0' && ch <= '9' {
|
||||
numDigits++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if numDigits == 0 {
|
||||
return LocalizableError("must start with non-negative integer")
|
||||
}
|
||||
if numDigits > 1 && strings.HasPrefix(s, "0") {
|
||||
return LocalizableError("starts with zero")
|
||||
}
|
||||
s = s[numDigits:]
|
||||
|
||||
// followed by either json-pointer or '#'
|
||||
if s == "#" {
|
||||
return nil
|
||||
}
|
||||
return validateJSONPointer(s)
|
||||
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc4122#page-4
|
||||
func validateUUID(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
hexGroups := []int{8, 4, 4, 4, 12}
|
||||
groups := strings.Split(s, "-")
|
||||
if len(groups) != len(hexGroups) {
|
||||
return LocalizableError("must have %d elements", len(hexGroups))
|
||||
}
|
||||
for i, group := range groups {
|
||||
if len(group) != hexGroups[i] {
|
||||
return LocalizableError("element %d must be %d characters long", i+1, hexGroups[i])
|
||||
}
|
||||
for _, ch := range group {
|
||||
switch {
|
||||
case ch >= '0' && ch <= '9':
|
||||
case ch >= 'a' && ch <= 'f':
|
||||
case ch >= 'A' && ch <= 'F':
|
||||
default:
|
||||
return LocalizableError("non-hex character %q", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#appendix-A
|
||||
func validateDuration(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// must start with 'P'
|
||||
s, ok = strings.CutPrefix(s, "P")
|
||||
if !ok {
|
||||
return LocalizableError("must start with P")
|
||||
}
|
||||
if s == "" {
|
||||
return LocalizableError("nothing after P")
|
||||
}
|
||||
|
||||
// dur-week
|
||||
if s, ok := strings.CutSuffix(s, "W"); ok {
|
||||
if s == "" {
|
||||
return LocalizableError("no number in week")
|
||||
}
|
||||
for _, ch := range s {
|
||||
if ch < '0' || ch > '9' {
|
||||
return LocalizableError("invalid week")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
allUnits := []string{"YMD", "HMS"}
|
||||
for i, s := range strings.Split(s, "T") {
|
||||
if i != 0 && s == "" {
|
||||
return LocalizableError("no time elements")
|
||||
}
|
||||
if i >= len(allUnits) {
|
||||
return LocalizableError("more than one T")
|
||||
}
|
||||
units := allUnits[i]
|
||||
for s != "" {
|
||||
digitCount := 0
|
||||
for _, ch := range s {
|
||||
if ch >= '0' && ch <= '9' {
|
||||
digitCount++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if digitCount == 0 {
|
||||
return LocalizableError("missing number")
|
||||
}
|
||||
s = s[digitCount:]
|
||||
if s == "" {
|
||||
return LocalizableError("missing unit")
|
||||
}
|
||||
unit := s[0]
|
||||
j := strings.IndexByte(units, unit)
|
||||
if j == -1 {
|
||||
if strings.IndexByte(allUnits[i], unit) != -1 {
|
||||
return LocalizableError("unit %q out of order", unit)
|
||||
}
|
||||
return LocalizableError("invalid unit %q", unit)
|
||||
}
|
||||
units = units[j+1:]
|
||||
s = s[1:]
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateIPV4(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
groups := strings.Split(s, ".")
|
||||
if len(groups) != 4 {
|
||||
return LocalizableError("expected four decimals")
|
||||
}
|
||||
for _, group := range groups {
|
||||
if len(group) > 1 && group[0] == '0' {
|
||||
return LocalizableError("leading zeros")
|
||||
}
|
||||
n, err := strconv.Atoi(group)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if n < 0 || n > 255 {
|
||||
return LocalizableError("decimal must be between 0 and 255")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateIPV6(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if !strings.Contains(s, ":") {
|
||||
return LocalizableError("missing colon")
|
||||
}
|
||||
addr, err := netip.ParseAddr(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if addr.Zone() != "" {
|
||||
return LocalizableError("zone id is not a part of ipv6 address")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names
|
||||
func validateHostname(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters
|
||||
s = strings.TrimSuffix(s, ".")
|
||||
if len(s) > 253 {
|
||||
return LocalizableError("more than 253 characters long")
|
||||
}
|
||||
|
||||
// Hostnames are composed of series of labels concatenated with dots, as are all domain names
|
||||
for _, label := range strings.Split(s, ".") {
|
||||
// Each label must be from 1 to 63 characters long
|
||||
if len(label) < 1 || len(label) > 63 {
|
||||
return LocalizableError("label must be 1 to 63 characters long")
|
||||
}
|
||||
|
||||
// labels must not start or end with a hyphen
|
||||
if strings.HasPrefix(label, "-") {
|
||||
return LocalizableError("label starts with hyphen")
|
||||
}
|
||||
if strings.HasSuffix(label, "-") {
|
||||
return LocalizableError("label ends with hyphen")
|
||||
}
|
||||
|
||||
// labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner),
|
||||
// the digits '0' through '9', and the hyphen ('-')
|
||||
for _, ch := range label {
|
||||
switch {
|
||||
case ch >= 'a' && ch <= 'z':
|
||||
case ch >= 'A' && ch <= 'Z':
|
||||
case ch >= '0' && ch <= '9':
|
||||
case ch == '-':
|
||||
default:
|
||||
return LocalizableError("invalid character %q", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://en.wikipedia.org/wiki/Email_address
|
||||
func validateEmail(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
// entire email address to be no more than 254 characters long
|
||||
if len(s) > 254 {
|
||||
return LocalizableError("more than 255 characters long")
|
||||
}
|
||||
|
||||
// email address is generally recognized as having two parts joined with an at-sign
|
||||
at := strings.LastIndexByte(s, '@')
|
||||
if at == -1 {
|
||||
return LocalizableError("missing @")
|
||||
}
|
||||
local, domain := s[:at], s[at+1:]
|
||||
|
||||
// local part may be up to 64 characters long
|
||||
if len(local) > 64 {
|
||||
return LocalizableError("local part more than 64 characters long")
|
||||
}
|
||||
|
||||
if len(local) > 1 && strings.HasPrefix(local, `"`) && strings.HasPrefix(local, `"`) {
|
||||
// quoted
|
||||
local := local[1 : len(local)-1]
|
||||
if strings.IndexByte(local, '\\') != -1 || strings.IndexByte(local, '"') != -1 {
|
||||
return LocalizableError("backslash and quote are not allowed within quoted local part")
|
||||
}
|
||||
} else {
|
||||
// unquoted
|
||||
if strings.HasPrefix(local, ".") {
|
||||
return LocalizableError("starts with dot")
|
||||
}
|
||||
if strings.HasSuffix(local, ".") {
|
||||
return LocalizableError("ends with dot")
|
||||
}
|
||||
|
||||
// consecutive dots not allowed
|
||||
if strings.Contains(local, "..") {
|
||||
return LocalizableError("consecutive dots")
|
||||
}
|
||||
|
||||
// check allowed chars
|
||||
for _, ch := range local {
|
||||
switch {
|
||||
case ch >= 'a' && ch <= 'z':
|
||||
case ch >= 'A' && ch <= 'Z':
|
||||
case ch >= '0' && ch <= '9':
|
||||
case strings.ContainsRune(".!#$%&'*+-/=?^_`{|}~", ch):
|
||||
default:
|
||||
return LocalizableError("invalid character %q", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// domain if enclosed in brackets, must match an IP address
|
||||
if strings.HasPrefix(domain, "[") && strings.HasSuffix(domain, "]") {
|
||||
domain = domain[1 : len(domain)-1]
|
||||
if rem, ok := strings.CutPrefix(domain, "IPv6:"); ok {
|
||||
if err := validateIPV6(rem); err != nil {
|
||||
return LocalizableError("invalid ipv6 address: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
if err := validateIPV4(domain); err != nil {
|
||||
return LocalizableError("invalid ipv4 address: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// domain must match the requirements for a hostname
|
||||
if err := validateHostname(domain); err != nil {
|
||||
return LocalizableError("invalid domain: %v", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// see see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
|
||||
func validateDate(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
_, err := time.Parse("2006-01-02", s)
|
||||
return err
|
||||
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
|
||||
// NOTE: golang time package does not support leap seconds.
|
||||
func validateTime(v any) error {
|
||||
str, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// min: hh:mm:ssZ
|
||||
if len(str) < 9 {
|
||||
return LocalizableError("less than 9 characters long")
|
||||
}
|
||||
if str[2] != ':' || str[5] != ':' {
|
||||
return LocalizableError("missing colon in correct place")
|
||||
}
|
||||
|
||||
// parse hh:mm:ss
|
||||
var hms []int
|
||||
for _, tok := range strings.SplitN(str[:8], ":", 3) {
|
||||
i, err := strconv.Atoi(tok)
|
||||
if err != nil {
|
||||
return LocalizableError("invalid hour/min/sec")
|
||||
}
|
||||
if i < 0 {
|
||||
return LocalizableError("non-positive hour/min/sec")
|
||||
}
|
||||
hms = append(hms, i)
|
||||
}
|
||||
if len(hms) != 3 {
|
||||
return LocalizableError("missing hour/min/sec")
|
||||
}
|
||||
h, m, s := hms[0], hms[1], hms[2]
|
||||
if h > 23 || m > 59 || s > 60 {
|
||||
return LocalizableError("hour/min/sec out of range")
|
||||
}
|
||||
str = str[8:]
|
||||
|
||||
// parse sec-frac if present
|
||||
if rem, ok := strings.CutPrefix(str, "."); ok {
|
||||
numDigits := 0
|
||||
for _, ch := range rem {
|
||||
if ch >= '0' && ch <= '9' {
|
||||
numDigits++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
if numDigits == 0 {
|
||||
return LocalizableError("no digits in second fraction")
|
||||
}
|
||||
str = rem[numDigits:]
|
||||
}
|
||||
|
||||
if str != "z" && str != "Z" {
|
||||
// parse time-numoffset
|
||||
if len(str) != 6 {
|
||||
return LocalizableError("offset must be 6 characters long")
|
||||
}
|
||||
var sign int
|
||||
switch str[0] {
|
||||
case '+':
|
||||
sign = -1
|
||||
case '-':
|
||||
sign = +1
|
||||
default:
|
||||
return LocalizableError("offset must begin with plus/minus")
|
||||
}
|
||||
str = str[1:]
|
||||
if str[2] != ':' {
|
||||
return LocalizableError("missing colon in offset in correct place")
|
||||
}
|
||||
|
||||
var zhm []int
|
||||
for _, tok := range strings.SplitN(str, ":", 2) {
|
||||
i, err := strconv.Atoi(tok)
|
||||
if err != nil {
|
||||
return LocalizableError("invalid hour/min in offset")
|
||||
}
|
||||
if i < 0 {
|
||||
return LocalizableError("non-positive hour/min in offset")
|
||||
}
|
||||
zhm = append(zhm, i)
|
||||
}
|
||||
zh, zm := zhm[0], zhm[1]
|
||||
if zh > 23 || zm > 59 {
|
||||
return LocalizableError("hour/min in offset out of range")
|
||||
}
|
||||
|
||||
// apply timezone
|
||||
hm := (h*60 + m) + sign*(zh*60+zm)
|
||||
if hm < 0 {
|
||||
hm += 24 * 60
|
||||
}
|
||||
h, m = hm/60, hm%60
|
||||
}
|
||||
|
||||
// check leap second
|
||||
if s >= 60 && (h != 23 || m != 59) {
|
||||
return LocalizableError("invalid leap second")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://datatracker.ietf.org/doc/html/rfc3339#section-5.6
|
||||
func validateDateTime(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// min: yyyy-mm-ddThh:mm:ssZ
|
||||
if len(s) < 20 {
|
||||
return LocalizableError("less than 20 characters long")
|
||||
}
|
||||
|
||||
if s[10] != 't' && s[10] != 'T' {
|
||||
return LocalizableError("11th character must be t or T")
|
||||
}
|
||||
if err := validateDate(s[:10]); err != nil {
|
||||
return LocalizableError("invalid date element: %v", err)
|
||||
}
|
||||
if err := validateTime(s[11:]); err != nil {
|
||||
return LocalizableError("invalid time element: %v", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseURL(s string) (*gourl.URL, error) {
|
||||
u, err := gourl.Parse(s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// gourl does not validate ipv6 host address
|
||||
hostName := u.Hostname()
|
||||
if strings.Contains(hostName, ":") {
|
||||
if !strings.Contains(u.Host, "[") || !strings.Contains(u.Host, "]") {
|
||||
return nil, LocalizableError("ipv6 address not enclosed in brackets")
|
||||
}
|
||||
if err := validateIPV6(hostName); err != nil {
|
||||
return nil, LocalizableError("invalid ipv6 address: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return u, nil
|
||||
}
|
||||
|
||||
func validateURI(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
u, err := parseURL(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !u.IsAbs() {
|
||||
return LocalizableError("relative url")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateURIReference(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if strings.Contains(s, `\`) {
|
||||
return LocalizableError(`contains \`)
|
||||
}
|
||||
_, err := parseURL(s)
|
||||
return err
|
||||
}
|
||||
|
||||
func validateURITemplate(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
u, err := parseURL(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, tok := range strings.Split(u.RawPath, "/") {
|
||||
tok, err = decode(tok)
|
||||
if err != nil {
|
||||
return LocalizableError("percent decode failed: %v", err)
|
||||
}
|
||||
want := true
|
||||
for _, ch := range tok {
|
||||
var got bool
|
||||
switch ch {
|
||||
case '{':
|
||||
got = true
|
||||
case '}':
|
||||
got = false
|
||||
default:
|
||||
continue
|
||||
}
|
||||
if got != want {
|
||||
return LocalizableError("nested curly braces")
|
||||
}
|
||||
want = !want
|
||||
}
|
||||
if !want {
|
||||
return LocalizableError("no matching closing brace")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validatePeriod(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
slash := strings.IndexByte(s, '/')
|
||||
if slash == -1 {
|
||||
return LocalizableError("missing slash")
|
||||
}
|
||||
|
||||
start, end := s[:slash], s[slash+1:]
|
||||
if strings.HasPrefix(start, "P") {
|
||||
if err := validateDuration(start); err != nil {
|
||||
return LocalizableError("invalid start duration: %v", err)
|
||||
}
|
||||
if err := validateDateTime(end); err != nil {
|
||||
return LocalizableError("invalid end date-time: %v", err)
|
||||
}
|
||||
} else {
|
||||
if err := validateDateTime(start); err != nil {
|
||||
return LocalizableError("invalid start date-time: %v", err)
|
||||
}
|
||||
if strings.HasPrefix(end, "P") {
|
||||
if err := validateDuration(end); err != nil {
|
||||
return LocalizableError("invalid end duration: %v", err)
|
||||
}
|
||||
} else if err := validateDateTime(end); err != nil {
|
||||
return LocalizableError("invalid end date-time: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// see https://semver.org/#backusnaur-form-grammar-for-valid-semver-versions
|
||||
func validateSemver(v any) error {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
// build --
|
||||
if i := strings.IndexByte(s, '+'); i != -1 {
|
||||
build := s[i+1:]
|
||||
if build == "" {
|
||||
return LocalizableError("build is empty")
|
||||
}
|
||||
for _, buildID := range strings.Split(build, ".") {
|
||||
if buildID == "" {
|
||||
return LocalizableError("build identifier is empty")
|
||||
}
|
||||
for _, ch := range buildID {
|
||||
switch {
|
||||
case ch >= '0' && ch <= '9':
|
||||
case (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '-':
|
||||
default:
|
||||
return LocalizableError("invalid character %q in build identifier", ch)
|
||||
}
|
||||
}
|
||||
}
|
||||
s = s[:i]
|
||||
}
|
||||
|
||||
// pre-release --
|
||||
if i := strings.IndexByte(s, '-'); i != -1 {
|
||||
preRelease := s[i+1:]
|
||||
for _, preReleaseID := range strings.Split(preRelease, ".") {
|
||||
if preReleaseID == "" {
|
||||
return LocalizableError("pre-release identifier is empty")
|
||||
}
|
||||
allDigits := true
|
||||
for _, ch := range preReleaseID {
|
||||
switch {
|
||||
case ch >= '0' && ch <= '9':
|
||||
case (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || ch == '-':
|
||||
allDigits = false
|
||||
default:
|
||||
return LocalizableError("invalid character %q in pre-release identifier", ch)
|
||||
}
|
||||
}
|
||||
if allDigits && len(preReleaseID) > 1 && preReleaseID[0] == '0' {
|
||||
return LocalizableError("pre-release numeric identifier starts with zero")
|
||||
}
|
||||
}
|
||||
s = s[:i]
|
||||
}
|
||||
|
||||
// versionCore --
|
||||
versions := strings.Split(s, ".")
|
||||
if len(versions) != 3 {
|
||||
return LocalizableError("versionCore must have 3 numbers separated by dot")
|
||||
}
|
||||
names := []string{"major", "minor", "patch"}
|
||||
for i, version := range versions {
|
||||
if version == "" {
|
||||
return LocalizableError("%s is empty", names[i])
|
||||
}
|
||||
if len(version) > 1 && version[0] == '0' {
|
||||
return LocalizableError("%s starts with zero", names[i])
|
||||
}
|
||||
for _, ch := range version {
|
||||
if ch < '0' || ch > '9' {
|
||||
return LocalizableError("%s contains non-digit", names[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
8
vendor/github.com/santhosh-tekuri/jsonschema/v6/go.work
generated
vendored
Normal file
8
vendor/github.com/santhosh-tekuri/jsonschema/v6/go.work
generated
vendored
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
go 1.21.1
|
||||
|
||||
use (
|
||||
.
|
||||
./cmd/jv
|
||||
)
|
||||
|
||||
replace github.com/santhosh-tekuri/jsonschema/v6 v6.0.0 => ./
|
||||
651
vendor/github.com/santhosh-tekuri/jsonschema/v6/kind/kind.go
generated
vendored
Normal file
651
vendor/github.com/santhosh-tekuri/jsonschema/v6/kind/kind.go
generated
vendored
Normal file
|
|
@ -0,0 +1,651 @@
|
|||
package kind
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/big"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
// --
|
||||
|
||||
type InvalidJsonValue struct {
|
||||
Value any
|
||||
}
|
||||
|
||||
func (*InvalidJsonValue) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (k *InvalidJsonValue) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("invalid jsonType %T", k.Value)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Schema struct {
|
||||
Location string
|
||||
}
|
||||
|
||||
func (*Schema) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (k *Schema) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("jsonschema validation failed with %s", quote(k.Location))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Group struct{}
|
||||
|
||||
func (*Group) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*Group) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("validation failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Not struct{}
|
||||
|
||||
func (*Not) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*Not) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("not failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AllOf struct{}
|
||||
|
||||
func (*AllOf) KeywordPath() []string {
|
||||
return []string{"allOf"}
|
||||
}
|
||||
|
||||
func (*AllOf) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("allOf failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AnyOf struct{}
|
||||
|
||||
func (*AnyOf) KeywordPath() []string {
|
||||
return []string{"anyOf"}
|
||||
}
|
||||
|
||||
func (*AnyOf) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("anyOf failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type OneOf struct {
|
||||
// Subschemas gives indexes of Subschemas that have matched.
|
||||
// Value nil, means none of the subschemas matched.
|
||||
Subschemas []int
|
||||
}
|
||||
|
||||
func (*OneOf) KeywordPath() []string {
|
||||
return []string{"oneOf"}
|
||||
}
|
||||
|
||||
func (k *OneOf) LocalizedString(p *message.Printer) string {
|
||||
if len(k.Subschemas) == 0 {
|
||||
return p.Sprintf("oneOf failed, none matched")
|
||||
}
|
||||
return p.Sprintf("oneOf failed, subschemas %d, %d matched", k.Subschemas[0], k.Subschemas[1])
|
||||
}
|
||||
|
||||
//--
|
||||
|
||||
type FalseSchema struct{}
|
||||
|
||||
func (*FalseSchema) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*FalseSchema) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("false schema")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type RefCycle struct {
|
||||
URL string
|
||||
KeywordLocation1 string
|
||||
KeywordLocation2 string
|
||||
}
|
||||
|
||||
func (*RefCycle) KeywordPath() []string {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (k *RefCycle) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("both %s and %s resolve to %q causing reference cycle", k.KeywordLocation1, k.KeywordLocation2, k.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Type struct {
|
||||
Got string
|
||||
Want []string
|
||||
}
|
||||
|
||||
func (*Type) KeywordPath() []string {
|
||||
return []string{"type"}
|
||||
}
|
||||
|
||||
func (k *Type) LocalizedString(p *message.Printer) string {
|
||||
want := strings.Join(k.Want, " or ")
|
||||
return p.Sprintf("got %s, want %s", k.Got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Enum struct {
|
||||
Got any
|
||||
Want []any
|
||||
}
|
||||
|
||||
// KeywordPath implements jsonschema.ErrorKind.
|
||||
func (*Enum) KeywordPath() []string {
|
||||
return []string{"enum"}
|
||||
}
|
||||
|
||||
func (k *Enum) LocalizedString(p *message.Printer) string {
|
||||
allPrimitive := true
|
||||
loop:
|
||||
for _, item := range k.Want {
|
||||
switch item.(type) {
|
||||
case []any, map[string]any:
|
||||
allPrimitive = false
|
||||
break loop
|
||||
}
|
||||
}
|
||||
if allPrimitive {
|
||||
if len(k.Want) == 1 {
|
||||
return p.Sprintf("value must be %s", display(k.Want[0]))
|
||||
}
|
||||
var want []string
|
||||
for _, v := range k.Want {
|
||||
want = append(want, display(v))
|
||||
}
|
||||
return p.Sprintf("value must be one of %s", strings.Join(want, ", "))
|
||||
}
|
||||
return p.Sprintf("enum failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Const struct {
|
||||
Got any
|
||||
Want any
|
||||
}
|
||||
|
||||
func (*Const) KeywordPath() []string {
|
||||
return []string{"const"}
|
||||
}
|
||||
|
||||
func (k *Const) LocalizedString(p *message.Printer) string {
|
||||
switch want := k.Want.(type) {
|
||||
case []any, map[string]any:
|
||||
return p.Sprintf("const failed")
|
||||
default:
|
||||
return p.Sprintf("value must be %s", display(want))
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Format struct {
|
||||
Got any
|
||||
Want string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (*Format) KeywordPath() []string {
|
||||
return []string{"format"}
|
||||
}
|
||||
|
||||
func (k *Format) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("%s is not valid %s: %v", display(k.Got), k.Want, localizedError(k.Err, p))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Reference struct {
|
||||
Keyword string
|
||||
URL string
|
||||
}
|
||||
|
||||
func (k *Reference) KeywordPath() []string {
|
||||
return []string{k.Keyword}
|
||||
}
|
||||
|
||||
func (*Reference) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("validation failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinProperties struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinProperties) KeywordPath() []string {
|
||||
return []string{"minProperties"}
|
||||
}
|
||||
|
||||
func (k *MinProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minProperties: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxProperties struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxProperties) KeywordPath() []string {
|
||||
return []string{"maxProperties"}
|
||||
}
|
||||
|
||||
func (k *MaxProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxProperties: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinItems struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinItems) KeywordPath() []string {
|
||||
return []string{"minItems"}
|
||||
}
|
||||
|
||||
func (k *MinItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minItems: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxItems struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxItems) KeywordPath() []string {
|
||||
return []string{"maxItems"}
|
||||
}
|
||||
|
||||
func (k *MaxItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxItems: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AdditionalItems struct {
|
||||
Count int
|
||||
}
|
||||
|
||||
func (*AdditionalItems) KeywordPath() []string {
|
||||
return []string{"additionalItems"}
|
||||
}
|
||||
|
||||
func (k *AdditionalItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("last %d additionalItem(s) not allowed", k.Count)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Required struct {
|
||||
Missing []string
|
||||
}
|
||||
|
||||
func (*Required) KeywordPath() []string {
|
||||
return []string{"required"}
|
||||
}
|
||||
|
||||
func (k *Required) LocalizedString(p *message.Printer) string {
|
||||
if len(k.Missing) == 1 {
|
||||
return p.Sprintf("missing property %s", quote(k.Missing[0]))
|
||||
}
|
||||
return p.Sprintf("missing properties %s", joinQuoted(k.Missing, ", "))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Dependency struct {
|
||||
Prop string // dependency of prop that failed
|
||||
Missing []string // missing props
|
||||
}
|
||||
|
||||
func (k *Dependency) KeywordPath() []string {
|
||||
return []string{"dependency", k.Prop}
|
||||
}
|
||||
|
||||
func (k *Dependency) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("properties %s required, if %s exists", joinQuoted(k.Missing, ", "), quote(k.Prop))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type DependentRequired struct {
|
||||
Prop string // dependency of prop that failed
|
||||
Missing []string // missing props
|
||||
}
|
||||
|
||||
func (k *DependentRequired) KeywordPath() []string {
|
||||
return []string{"dependentRequired", k.Prop}
|
||||
}
|
||||
|
||||
func (k *DependentRequired) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("properties %s required, if %s exists", joinQuoted(k.Missing, ", "), quote(k.Prop))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AdditionalProperties struct {
|
||||
Properties []string
|
||||
}
|
||||
|
||||
func (*AdditionalProperties) KeywordPath() []string {
|
||||
return []string{"additionalProperties"}
|
||||
}
|
||||
|
||||
func (k *AdditionalProperties) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("additional properties %s not allowed", joinQuoted(k.Properties, ", "))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type PropertyNames struct {
|
||||
Property string
|
||||
}
|
||||
|
||||
func (*PropertyNames) KeywordPath() []string {
|
||||
return []string{"propertyNames"}
|
||||
}
|
||||
|
||||
func (k *PropertyNames) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("invalid propertyName %s", quote(k.Property))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type UniqueItems struct {
|
||||
Duplicates [2]int
|
||||
}
|
||||
|
||||
func (*UniqueItems) KeywordPath() []string {
|
||||
return []string{"uniqueItems"}
|
||||
}
|
||||
|
||||
func (k *UniqueItems) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("items at %d and %d are equal", k.Duplicates[0], k.Duplicates[1])
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Contains struct{}
|
||||
|
||||
func (*Contains) KeywordPath() []string {
|
||||
return []string{"contains"}
|
||||
}
|
||||
|
||||
func (*Contains) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("no items match contains schema")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinContains struct {
|
||||
Got []int
|
||||
Want int
|
||||
}
|
||||
|
||||
func (*MinContains) KeywordPath() []string {
|
||||
return []string{"minContains"}
|
||||
}
|
||||
|
||||
func (k *MinContains) LocalizedString(p *message.Printer) string {
|
||||
if len(k.Got) == 0 {
|
||||
return p.Sprintf("min %d items required to match contains schema, but none matched", k.Want)
|
||||
} else {
|
||||
got := fmt.Sprintf("%v", k.Got)
|
||||
return p.Sprintf("min %d items required to match contains schema, but matched %d items at %v", k.Want, len(k.Got), got[1:len(got)-1])
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxContains struct {
|
||||
Got []int
|
||||
Want int
|
||||
}
|
||||
|
||||
func (*MaxContains) KeywordPath() []string {
|
||||
return []string{"maxContains"}
|
||||
}
|
||||
|
||||
func (k *MaxContains) LocalizedString(p *message.Printer) string {
|
||||
got := fmt.Sprintf("%v", k.Got)
|
||||
return p.Sprintf("max %d items required to match contains schema, but matched %d items at %v", k.Want, len(k.Got), got[1:len(got)-1])
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MinLength struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MinLength) KeywordPath() []string {
|
||||
return []string{"minLength"}
|
||||
}
|
||||
|
||||
func (k *MinLength) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("minLength: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MaxLength struct {
|
||||
Got, Want int
|
||||
}
|
||||
|
||||
func (*MaxLength) KeywordPath() []string {
|
||||
return []string{"maxLength"}
|
||||
}
|
||||
|
||||
func (k *MaxLength) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("maxLength: got %d, want %d", k.Got, k.Want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Pattern struct {
|
||||
Got string
|
||||
Want string
|
||||
}
|
||||
|
||||
func (*Pattern) KeywordPath() []string {
|
||||
return []string{"pattern"}
|
||||
}
|
||||
|
||||
func (k *Pattern) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("%s does not match pattern %s", quote(k.Got), quote(k.Want))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentEncoding struct {
|
||||
Want string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (*ContentEncoding) KeywordPath() []string {
|
||||
return []string{"contentEncoding"}
|
||||
}
|
||||
|
||||
func (k *ContentEncoding) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("value is not %s encoded: %v", quote(k.Want), localizedError(k.Err, p))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentMediaType struct {
|
||||
Got []byte
|
||||
Want string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (*ContentMediaType) KeywordPath() []string {
|
||||
return []string{"contentMediaType"}
|
||||
}
|
||||
|
||||
func (k *ContentMediaType) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("value if not of mediatype %s: %v", quote(k.Want), k.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ContentSchema struct{}
|
||||
|
||||
func (*ContentSchema) KeywordPath() []string {
|
||||
return []string{"contentSchema"}
|
||||
}
|
||||
|
||||
func (*ContentSchema) LocalizedString(p *message.Printer) string {
|
||||
return p.Sprintf("contentSchema failed")
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Minimum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*Minimum) KeywordPath() []string {
|
||||
return []string{"minimum"}
|
||||
}
|
||||
|
||||
func (k *Minimum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("minimum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Maximum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*Maximum) KeywordPath() []string {
|
||||
return []string{"maximum"}
|
||||
}
|
||||
|
||||
func (k *Maximum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("maximum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ExclusiveMinimum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*ExclusiveMinimum) KeywordPath() []string {
|
||||
return []string{"exclusiveMinimum"}
|
||||
}
|
||||
|
||||
func (k *ExclusiveMinimum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("exclusiveMinimum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ExclusiveMaximum struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*ExclusiveMaximum) KeywordPath() []string {
|
||||
return []string{"exclusiveMaximum"}
|
||||
}
|
||||
|
||||
func (k *ExclusiveMaximum) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("exclusiveMaximum: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MultipleOf struct {
|
||||
Got *big.Rat
|
||||
Want *big.Rat
|
||||
}
|
||||
|
||||
func (*MultipleOf) KeywordPath() []string {
|
||||
return []string{"multipleOf"}
|
||||
}
|
||||
|
||||
func (k *MultipleOf) LocalizedString(p *message.Printer) string {
|
||||
got, _ := k.Got.Float64()
|
||||
want, _ := k.Want.Float64()
|
||||
return p.Sprintf("multipleOf: got %v, want %v", got, want)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
func quote(s string) string {
|
||||
s = fmt.Sprintf("%q", s)
|
||||
s = strings.ReplaceAll(s, `\"`, `"`)
|
||||
s = strings.ReplaceAll(s, `'`, `\'`)
|
||||
return "'" + s[1:len(s)-1] + "'"
|
||||
}
|
||||
|
||||
func joinQuoted(arr []string, sep string) string {
|
||||
var sb strings.Builder
|
||||
for _, s := range arr {
|
||||
if sb.Len() > 0 {
|
||||
sb.WriteString(sep)
|
||||
}
|
||||
sb.WriteString(quote(s))
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// to be used only for primitive.
|
||||
func display(v any) string {
|
||||
switch v := v.(type) {
|
||||
case string:
|
||||
return quote(v)
|
||||
case []any, map[string]any:
|
||||
return "value"
|
||||
default:
|
||||
return fmt.Sprintf("%v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func localizedError(err error, p *message.Printer) string {
|
||||
if err, ok := err.(interface{ LocalizedError(*message.Printer) string }); ok {
|
||||
return err.LocalizedError(p)
|
||||
}
|
||||
return err.Error()
|
||||
}
|
||||
266
vendor/github.com/santhosh-tekuri/jsonschema/v6/loader.go
generated
vendored
Normal file
266
vendor/github.com/santhosh-tekuri/jsonschema/v6/loader.go
generated
vendored
Normal file
|
|
@ -0,0 +1,266 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
gourl "net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// URLLoader knows how to load json from given url.
|
||||
type URLLoader interface {
|
||||
// Load loads json from given absolute url.
|
||||
Load(url string) (any, error)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// FileLoader loads json file url.
|
||||
type FileLoader struct{}
|
||||
|
||||
func (l FileLoader) Load(url string) (any, error) {
|
||||
path, err := l.ToFile(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
f, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
return UnmarshalJSON(f)
|
||||
}
|
||||
|
||||
// ToFile is helper method to convert file url to file path.
|
||||
func (l FileLoader) ToFile(url string) (string, error) {
|
||||
u, err := gourl.Parse(url)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if u.Scheme != "file" {
|
||||
return "", fmt.Errorf("invalid file url: %s", u)
|
||||
}
|
||||
path := u.Path
|
||||
if runtime.GOOS == "windows" {
|
||||
path = strings.TrimPrefix(path, "/")
|
||||
path = filepath.FromSlash(path)
|
||||
}
|
||||
return path, nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemeURLLoader delegates to other [URLLoaders]
|
||||
// based on url scheme.
|
||||
type SchemeURLLoader map[string]URLLoader
|
||||
|
||||
func (l SchemeURLLoader) Load(url string) (any, error) {
|
||||
u, err := gourl.Parse(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ll, ok := l[u.Scheme]
|
||||
if !ok {
|
||||
return nil, &UnsupportedURLSchemeError{u.String()}
|
||||
}
|
||||
return ll.Load(url)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
//go:embed metaschemas
|
||||
var metaFS embed.FS
|
||||
|
||||
func openMeta(url string) (fs.File, error) {
|
||||
u, meta := strings.CutPrefix(url, "http://json-schema.org/")
|
||||
if !meta {
|
||||
u, meta = strings.CutPrefix(url, "https://json-schema.org/")
|
||||
}
|
||||
if meta {
|
||||
if u == "schema" {
|
||||
return openMeta(draftLatest.url)
|
||||
}
|
||||
f, err := metaFS.Open("metaschemas/" + u)
|
||||
if err != nil {
|
||||
if errors.Is(err, fs.ErrNotExist) {
|
||||
return nil, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
return f, err
|
||||
}
|
||||
return nil, nil
|
||||
|
||||
}
|
||||
|
||||
func isMeta(url string) bool {
|
||||
f, err := openMeta(url)
|
||||
if err != nil {
|
||||
return true
|
||||
}
|
||||
if f != nil {
|
||||
f.Close()
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func loadMeta(url string) (any, error) {
|
||||
f, err := openMeta(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if f == nil {
|
||||
return nil, nil
|
||||
}
|
||||
defer f.Close()
|
||||
return UnmarshalJSON(f)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type defaultLoader struct {
|
||||
docs map[url]any // docs loaded so far
|
||||
loader URLLoader
|
||||
}
|
||||
|
||||
func (l *defaultLoader) add(url url, doc any) bool {
|
||||
if _, ok := l.docs[url]; ok {
|
||||
return false
|
||||
}
|
||||
l.docs[url] = doc
|
||||
return true
|
||||
}
|
||||
|
||||
func (l *defaultLoader) load(url url) (any, error) {
|
||||
if doc, ok := l.docs[url]; ok {
|
||||
return doc, nil
|
||||
}
|
||||
doc, err := loadMeta(url.String())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if doc != nil {
|
||||
l.add(url, doc)
|
||||
return doc, nil
|
||||
}
|
||||
if l.loader == nil {
|
||||
return nil, &LoadURLError{url.String(), errors.New("no URLLoader set")}
|
||||
}
|
||||
doc, err = l.loader.Load(url.String())
|
||||
if err != nil {
|
||||
return nil, &LoadURLError{URL: url.String(), Err: err}
|
||||
}
|
||||
l.add(url, doc)
|
||||
return doc, nil
|
||||
}
|
||||
|
||||
func (l *defaultLoader) getDraft(up urlPtr, doc any, defaultDraft *Draft, cycle map[url]struct{}) (*Draft, error) {
|
||||
obj, ok := doc.(map[string]any)
|
||||
if !ok {
|
||||
return defaultDraft, nil
|
||||
}
|
||||
sch, ok := strVal(obj, "$schema")
|
||||
if !ok {
|
||||
return defaultDraft, nil
|
||||
}
|
||||
if draft := draftFromURL(sch); draft != nil {
|
||||
return draft, nil
|
||||
}
|
||||
sch, _ = split(sch)
|
||||
if _, err := gourl.Parse(sch); err != nil {
|
||||
return nil, &InvalidMetaSchemaURLError{up.String(), err}
|
||||
}
|
||||
schUrl := url(sch)
|
||||
if up.ptr.isEmpty() && schUrl == up.url {
|
||||
return nil, &UnsupportedDraftError{schUrl.String()}
|
||||
}
|
||||
if _, ok := cycle[schUrl]; ok {
|
||||
return nil, &MetaSchemaCycleError{schUrl.String()}
|
||||
}
|
||||
cycle[schUrl] = struct{}{}
|
||||
doc, err := l.load(schUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return l.getDraft(urlPtr{schUrl, ""}, doc, defaultDraft, cycle)
|
||||
}
|
||||
|
||||
func (l *defaultLoader) getMetaVocabs(doc any, draft *Draft, vocabularies map[string]*Vocabulary) ([]string, error) {
|
||||
obj, ok := doc.(map[string]any)
|
||||
if !ok {
|
||||
return nil, nil
|
||||
}
|
||||
sch, ok := strVal(obj, "$schema")
|
||||
if !ok {
|
||||
return nil, nil
|
||||
}
|
||||
if draft := draftFromURL(sch); draft != nil {
|
||||
return nil, nil
|
||||
}
|
||||
sch, _ = split(sch)
|
||||
if _, err := gourl.Parse(sch); err != nil {
|
||||
return nil, &ParseURLError{sch, err}
|
||||
}
|
||||
schUrl := url(sch)
|
||||
doc, err := l.load(schUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return draft.getVocabs(schUrl, doc, vocabularies)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type LoadURLError struct {
|
||||
URL string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e *LoadURLError) Error() string {
|
||||
return fmt.Sprintf("failing loading %q: %v", e.URL, e.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type UnsupportedURLSchemeError struct {
|
||||
url string
|
||||
}
|
||||
|
||||
func (e *UnsupportedURLSchemeError) Error() string {
|
||||
return fmt.Sprintf("no URLLoader registered for %q", e.url)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ResourceExistsError struct {
|
||||
url string
|
||||
}
|
||||
|
||||
func (e *ResourceExistsError) Error() string {
|
||||
return fmt.Sprintf("resource for %q already exists", e.url)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// UnmarshalJSON unmarshals into [any] without losing
|
||||
// number precision using [json.Number].
|
||||
func UnmarshalJSON(r io.Reader) (any, error) {
|
||||
decoder := json.NewDecoder(r)
|
||||
decoder.UseNumber()
|
||||
var doc any
|
||||
if err := decoder.Decode(&doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err := decoder.Token(); err == nil || err != io.EOF {
|
||||
return nil, fmt.Errorf("invalid character after top-level value")
|
||||
}
|
||||
return doc, nil
|
||||
}
|
||||
151
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-04/schema
generated
vendored
Normal file
151
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-04/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,151 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"description": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"positiveInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"positiveIntegerDefault0": {
|
||||
"allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uriref"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": {},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"exclusiveMinimum": true
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": {
|
||||
"anyOf": [
|
||||
{ "type": "boolean" },
|
||||
{ "$ref": "#" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxProperties": { "$ref": "#/definitions/positiveInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "type": "boolean" },
|
||||
{ "$ref": "#" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" },
|
||||
"format": { "type": "string" },
|
||||
"$ref": { "type": "string" }
|
||||
},
|
||||
"dependencies": {
|
||||
"exclusiveMaximum": [ "maximum" ],
|
||||
"exclusiveMinimum": [ "minimum" ]
|
||||
},
|
||||
"default": {}
|
||||
}
|
||||
150
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-06/schema
generated
vendored
Normal file
150
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-06/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-06/schema#",
|
||||
"$id": "http://json-schema.org/draft-06/schema#",
|
||||
"title": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"allOf": [
|
||||
{ "$ref": "#/definitions/nonNegativeInteger" },
|
||||
{ "default": 0 }
|
||||
]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
},
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": {},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": { "$ref": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": {}
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"contains": { "$ref": "#" },
|
||||
"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$ref": "#" },
|
||||
"const": {},
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"format": { "type": "string" },
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" }
|
||||
},
|
||||
"default": {}
|
||||
}
|
||||
172
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-07/schema
generated
vendored
Normal file
172
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft-07/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "Core schema meta-schema",
|
||||
"definitions": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#" }
|
||||
},
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"allOf": [
|
||||
{ "$ref": "#/definitions/nonNegativeInteger" },
|
||||
{ "default": 0 }
|
||||
]
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
},
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"additionalItems": { "$ref": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/schemaArray" }
|
||||
],
|
||||
"default": true
|
||||
},
|
||||
"maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"contains": { "$ref": "#" },
|
||||
"maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/definitions/stringArray" },
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"definitions": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$ref": "#" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#" },
|
||||
{ "$ref": "#/definitions/stringArray" }
|
||||
]
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$ref": "#" },
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true,
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/definitions/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/definitions/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"format": { "type": "string" },
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentEncoding": { "type": "string" },
|
||||
"if": { "$ref": "#" },
|
||||
"then": { "$ref": "#" },
|
||||
"else": { "$ref": "#" },
|
||||
"allOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"anyOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"oneOf": { "$ref": "#/definitions/schemaArray" },
|
||||
"not": { "$ref": "#" }
|
||||
},
|
||||
"default": true
|
||||
}
|
||||
55
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/applicator
generated
vendored
Normal file
55
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/applicator
generated
vendored
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/applicator",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/applicator": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"additionalItems": { "$recursiveRef": "#" },
|
||||
"unevaluatedItems": { "$recursiveRef": "#" },
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{ "$recursiveRef": "#" },
|
||||
{ "$ref": "#/$defs/schemaArray" }
|
||||
]
|
||||
},
|
||||
"contains": { "$recursiveRef": "#" },
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"unevaluatedProperties": { "$recursiveRef": "#" },
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependentSchemas": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$recursiveRef": "#"
|
||||
}
|
||||
},
|
||||
"propertyNames": { "$recursiveRef": "#" },
|
||||
"if": { "$recursiveRef": "#" },
|
||||
"then": { "$recursiveRef": "#" },
|
||||
"else": { "$recursiveRef": "#" },
|
||||
"allOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"anyOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"oneOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"not": { "$recursiveRef": "#" }
|
||||
},
|
||||
"$defs": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$recursiveRef": "#" }
|
||||
}
|
||||
}
|
||||
}
|
||||
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/content
generated
vendored
Normal file
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/content
generated
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/content",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/content": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Content vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentEncoding": { "type": "string" },
|
||||
"contentSchema": { "$recursiveRef": "#" }
|
||||
}
|
||||
}
|
||||
56
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/core
generated
vendored
Normal file
56
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/core
generated
vendored
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/core",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/core": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Core vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"type": "string",
|
||||
"format": "uri-reference",
|
||||
"$comment": "Non-empty fragments not allowed.",
|
||||
"pattern": "^[^#]*#?$"
|
||||
},
|
||||
"$schema": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"$anchor": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z][-A-Za-z0-9.:_]*$"
|
||||
},
|
||||
"$ref": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$recursiveRef": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
},
|
||||
"$recursiveAnchor": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"$vocabulary": {
|
||||
"type": "object",
|
||||
"propertyNames": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"additionalProperties": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"$defs": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/format
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/format
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/format",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/format": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Format vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/meta-data
generated
vendored
Normal file
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/meta-data
generated
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/meta-data",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/meta-data": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Meta-data vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"deprecated": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
}
|
||||
}
|
||||
}
|
||||
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/validation
generated
vendored
Normal file
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/meta/validation
generated
vendored
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/meta/validation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/validation": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Validation vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minContains": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 1
|
||||
},
|
||||
"maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/$defs/stringArray" },
|
||||
"dependentRequired": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
}
|
||||
},
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/$defs/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 0
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
}
|
||||
}
|
||||
41
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/schema
generated
vendored
Normal file
41
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2019-09/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$id": "https://json-schema.org/draft/2019-09/schema",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2019-09/vocab/core": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/applicator": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/validation": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/meta-data": true,
|
||||
"https://json-schema.org/draft/2019-09/vocab/format": false,
|
||||
"https://json-schema.org/draft/2019-09/vocab/content": true
|
||||
},
|
||||
"$recursiveAnchor": true,
|
||||
"title": "Core and Validation specifications meta-schema",
|
||||
"allOf": [
|
||||
{"$ref": "meta/core"},
|
||||
{"$ref": "meta/applicator"},
|
||||
{"$ref": "meta/validation"},
|
||||
{"$ref": "meta/meta-data"},
|
||||
{"$ref": "meta/format"},
|
||||
{"$ref": "meta/content"}
|
||||
],
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"definitions": {
|
||||
"$comment": "While no longer an official keyword as it is replaced by $defs, this keyword is retained in the meta-schema to prevent incompatible extensions as it remains in common use.",
|
||||
"type": "object",
|
||||
"additionalProperties": { "$recursiveRef": "#" },
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"$comment": "\"dependencies\" is no longer a keyword, but schema authors should avoid redefining it to facilitate a smooth transition to \"dependentSchemas\" and \"dependentRequired\"",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$recursiveRef": "#" },
|
||||
{ "$ref": "meta/validation#/$defs/stringArray" }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
47
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/applicator
generated
vendored
Normal file
47
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/applicator
generated
vendored
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/applicator",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/applicator": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"prefixItems": { "$ref": "#/$defs/schemaArray" },
|
||||
"items": { "$dynamicRef": "#meta" },
|
||||
"contains": { "$dynamicRef": "#meta" },
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"properties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"default": {}
|
||||
},
|
||||
"patternProperties": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"propertyNames": { "format": "regex" },
|
||||
"default": {}
|
||||
},
|
||||
"dependentSchemas": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"default": {}
|
||||
},
|
||||
"propertyNames": { "$dynamicRef": "#meta" },
|
||||
"if": { "$dynamicRef": "#meta" },
|
||||
"then": { "$dynamicRef": "#meta" },
|
||||
"else": { "$dynamicRef": "#meta" },
|
||||
"allOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"anyOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"oneOf": { "$ref": "#/$defs/schemaArray" },
|
||||
"not": { "$dynamicRef": "#meta" }
|
||||
},
|
||||
"$defs": {
|
||||
"schemaArray": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
}
|
||||
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/content
generated
vendored
Normal file
15
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/content
generated
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/content",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/content": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Content vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"contentEncoding": { "type": "string" },
|
||||
"contentMediaType": { "type": "string" },
|
||||
"contentSchema": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
50
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/core
generated
vendored
Normal file
50
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/core
generated
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/core",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/core": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Core vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"$id": {
|
||||
"$ref": "#/$defs/uriReferenceString",
|
||||
"$comment": "Non-empty fragments not allowed.",
|
||||
"pattern": "^[^#]*#?$"
|
||||
},
|
||||
"$schema": { "$ref": "#/$defs/uriString" },
|
||||
"$ref": { "$ref": "#/$defs/uriReferenceString" },
|
||||
"$anchor": { "$ref": "#/$defs/anchorString" },
|
||||
"$dynamicRef": { "$ref": "#/$defs/uriReferenceString" },
|
||||
"$dynamicAnchor": { "$ref": "#/$defs/anchorString" },
|
||||
"$vocabulary": {
|
||||
"type": "object",
|
||||
"propertyNames": { "$ref": "#/$defs/uriString" },
|
||||
"additionalProperties": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"$comment": {
|
||||
"type": "string"
|
||||
},
|
||||
"$defs": {
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"anchorString": {
|
||||
"type": "string",
|
||||
"pattern": "^[A-Za-z_][-A-Za-z0-9._]*$"
|
||||
},
|
||||
"uriString": {
|
||||
"type": "string",
|
||||
"format": "uri"
|
||||
},
|
||||
"uriReferenceString": {
|
||||
"type": "string",
|
||||
"format": "uri-reference"
|
||||
}
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-annotation
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-annotation
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/format-annotation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-annotation": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Format vocabulary meta-schema for annotation results",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-assertion
generated
vendored
Normal file
13
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/format-assertion
generated
vendored
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/format-assertion",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-assertion": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Format vocabulary meta-schema for assertion results",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"format": { "type": "string" }
|
||||
}
|
||||
}
|
||||
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/meta-data
generated
vendored
Normal file
35
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/meta-data
generated
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/meta-data",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/meta-data": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Meta-data vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"default": true,
|
||||
"deprecated": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"readOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"writeOnly": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"examples": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
}
|
||||
}
|
||||
}
|
||||
14
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/unevaluated
generated
vendored
Normal file
14
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/unevaluated
generated
vendored
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/unevaluated",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/unevaluated": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Unevaluated applicator vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"unevaluatedItems": { "$dynamicRef": "#meta" },
|
||||
"unevaluatedProperties": { "$dynamicRef": "#meta" }
|
||||
}
|
||||
}
|
||||
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/validation
generated
vendored
Normal file
97
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/meta/validation
generated
vendored
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/meta/validation",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/validation": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Validation vocabulary meta-schema",
|
||||
"type": ["object", "boolean"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"anyOf": [
|
||||
{ "$ref": "#/$defs/simpleTypes" },
|
||||
{
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/simpleTypes" },
|
||||
"minItems": 1,
|
||||
"uniqueItems": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"const": true,
|
||||
"enum": {
|
||||
"type": "array",
|
||||
"items": true
|
||||
},
|
||||
"multipleOf": {
|
||||
"type": "number",
|
||||
"exclusiveMinimum": 0
|
||||
},
|
||||
"maximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMaximum": {
|
||||
"type": "number"
|
||||
},
|
||||
"minimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"exclusiveMinimum": {
|
||||
"type": "number"
|
||||
},
|
||||
"maxLength": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minLength": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"pattern": {
|
||||
"type": "string",
|
||||
"format": "regex"
|
||||
},
|
||||
"maxItems": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minItems": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"uniqueItems": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"maxContains": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minContains": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 1
|
||||
},
|
||||
"maxProperties": { "$ref": "#/$defs/nonNegativeInteger" },
|
||||
"minProperties": { "$ref": "#/$defs/nonNegativeIntegerDefault0" },
|
||||
"required": { "$ref": "#/$defs/stringArray" },
|
||||
"dependentRequired": {
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"$ref": "#/$defs/stringArray"
|
||||
}
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"nonNegativeInteger": {
|
||||
"type": "integer",
|
||||
"minimum": 0
|
||||
},
|
||||
"nonNegativeIntegerDefault0": {
|
||||
"$ref": "#/$defs/nonNegativeInteger",
|
||||
"default": 0
|
||||
},
|
||||
"simpleTypes": {
|
||||
"enum": [
|
||||
"array",
|
||||
"boolean",
|
||||
"integer",
|
||||
"null",
|
||||
"number",
|
||||
"object",
|
||||
"string"
|
||||
]
|
||||
},
|
||||
"stringArray": {
|
||||
"type": "array",
|
||||
"items": { "type": "string" },
|
||||
"uniqueItems": true,
|
||||
"default": []
|
||||
}
|
||||
}
|
||||
}
|
||||
57
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/schema
generated
vendored
Normal file
57
vendor/github.com/santhosh-tekuri/jsonschema/v6/metaschemas/draft/2020-12/schema
generated
vendored
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$vocabulary": {
|
||||
"https://json-schema.org/draft/2020-12/vocab/core": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/applicator": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/unevaluated": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/validation": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/meta-data": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/format-annotation": true,
|
||||
"https://json-schema.org/draft/2020-12/vocab/content": true
|
||||
},
|
||||
"$dynamicAnchor": "meta",
|
||||
"title": "Core and Validation specifications meta-schema",
|
||||
"allOf": [
|
||||
{"$ref": "meta/core"},
|
||||
{"$ref": "meta/applicator"},
|
||||
{"$ref": "meta/unevaluated"},
|
||||
{"$ref": "meta/validation"},
|
||||
{"$ref": "meta/meta-data"},
|
||||
{"$ref": "meta/format-annotation"},
|
||||
{"$ref": "meta/content"}
|
||||
],
|
||||
"type": ["object", "boolean"],
|
||||
"$comment": "This meta-schema also defines keywords that have appeared in previous drafts in order to prevent incompatible extensions as they remain in common use.",
|
||||
"properties": {
|
||||
"definitions": {
|
||||
"$comment": "\"definitions\" has been replaced by \"$defs\".",
|
||||
"type": "object",
|
||||
"additionalProperties": { "$dynamicRef": "#meta" },
|
||||
"deprecated": true,
|
||||
"default": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"$comment": "\"dependencies\" has been split and replaced by \"dependentSchemas\" and \"dependentRequired\" in order to serve their differing semantics.",
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"anyOf": [
|
||||
{ "$dynamicRef": "#meta" },
|
||||
{ "$ref": "meta/validation#/$defs/stringArray" }
|
||||
]
|
||||
},
|
||||
"deprecated": true,
|
||||
"default": {}
|
||||
},
|
||||
"$recursiveAnchor": {
|
||||
"$comment": "\"$recursiveAnchor\" has been replaced by \"$dynamicAnchor\".",
|
||||
"$ref": "meta/core#/$defs/anchorString",
|
||||
"deprecated": true
|
||||
},
|
||||
"$recursiveRef": {
|
||||
"$comment": "\"$recursiveRef\" has been replaced by \"$dynamicRef\".",
|
||||
"$ref": "meta/core#/$defs/uriReferenceString",
|
||||
"deprecated": true
|
||||
}
|
||||
}
|
||||
}
|
||||
549
vendor/github.com/santhosh-tekuri/jsonschema/v6/objcompiler.go
generated
vendored
Normal file
549
vendor/github.com/santhosh-tekuri/jsonschema/v6/objcompiler.go
generated
vendored
Normal file
|
|
@ -0,0 +1,549 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type objCompiler struct {
|
||||
c *Compiler
|
||||
obj map[string]any
|
||||
up urlPtr
|
||||
r *root
|
||||
res *resource
|
||||
q *queue
|
||||
}
|
||||
|
||||
func (c *objCompiler) compile(s *Schema) error {
|
||||
// id --
|
||||
if id := c.res.dialect.draft.getID(c.obj); id != "" {
|
||||
s.ID = id
|
||||
}
|
||||
|
||||
// anchor --
|
||||
if s.DraftVersion < 2019 {
|
||||
// anchor is specified in id
|
||||
id := c.string(c.res.dialect.draft.id)
|
||||
if id != "" {
|
||||
_, f := split(id)
|
||||
if f != "" {
|
||||
var err error
|
||||
s.Anchor, err = decode(f)
|
||||
if err != nil {
|
||||
return &ParseAnchorError{URL: s.Location}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
s.Anchor = c.string("$anchor")
|
||||
}
|
||||
|
||||
if err := c.compileDraft4(s); err != nil {
|
||||
return err
|
||||
}
|
||||
if s.DraftVersion >= 6 {
|
||||
if err := c.compileDraft6(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if s.DraftVersion >= 7 {
|
||||
if err := c.compileDraft7(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if s.DraftVersion >= 2019 {
|
||||
if err := c.compileDraft2019(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if s.DraftVersion >= 2020 {
|
||||
if err := c.compileDraft2020(s); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// vocabularies
|
||||
vocabs := c.res.dialect.activeVocabs(c.c.roots.assertVocabs, c.c.roots.vocabularies)
|
||||
for _, vocab := range vocabs {
|
||||
v := c.c.roots.vocabularies[vocab]
|
||||
if v == nil {
|
||||
continue
|
||||
}
|
||||
ext, err := v.Compile(&CompilerContext{c}, c.obj)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ext != nil {
|
||||
s.Extensions = append(s.Extensions, ext)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft4(s *Schema) error {
|
||||
var err error
|
||||
|
||||
if c.hasVocab("core") {
|
||||
if s.Ref, err = c.enqueueRef("$ref"); err != nil {
|
||||
return err
|
||||
}
|
||||
if s.DraftVersion < 2019 && s.Ref != nil {
|
||||
// All other properties in a "$ref" object MUST be ignored
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if c.hasVocab("applicator") {
|
||||
s.AllOf = c.enqueueArr("allOf")
|
||||
s.AnyOf = c.enqueueArr("anyOf")
|
||||
s.OneOf = c.enqueueArr("oneOf")
|
||||
s.Not = c.enqueueProp("not")
|
||||
|
||||
if s.DraftVersion < 2020 {
|
||||
if items, ok := c.obj["items"]; ok {
|
||||
if _, ok := items.([]any); ok {
|
||||
s.Items = c.enqueueArr("items")
|
||||
s.AdditionalItems = c.enqueueAdditional("additionalItems")
|
||||
} else {
|
||||
s.Items = c.enqueueProp("items")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
s.Properties = c.enqueueMap("properties")
|
||||
if m := c.enqueueMap("patternProperties"); m != nil {
|
||||
s.PatternProperties = map[Regexp]*Schema{}
|
||||
for pname, sch := range m {
|
||||
re, err := c.c.roots.regexpEngine(pname)
|
||||
if err != nil {
|
||||
return &InvalidRegexError{c.up.format("patternProperties"), pname, err}
|
||||
}
|
||||
s.PatternProperties[re] = sch
|
||||
}
|
||||
}
|
||||
s.AdditionalProperties = c.enqueueAdditional("additionalProperties")
|
||||
|
||||
if m := c.objVal("dependencies"); m != nil {
|
||||
s.Dependencies = map[string]any{}
|
||||
for pname, pvalue := range m {
|
||||
if arr, ok := pvalue.([]any); ok {
|
||||
s.Dependencies[pname] = toStrings(arr)
|
||||
} else {
|
||||
ptr := c.up.ptr.append2("dependencies", pname)
|
||||
s.Dependencies[pname] = c.enqueuePtr(ptr)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.hasVocab("validation") {
|
||||
if t, ok := c.obj["type"]; ok {
|
||||
s.Types = newTypes(t)
|
||||
}
|
||||
if arr := c.arrVal("enum"); arr != nil {
|
||||
s.Enum = newEnum(arr)
|
||||
}
|
||||
s.MultipleOf = c.numVal("multipleOf")
|
||||
s.Maximum = c.numVal("maximum")
|
||||
if c.boolean("exclusiveMaximum") {
|
||||
s.ExclusiveMaximum = s.Maximum
|
||||
s.Maximum = nil
|
||||
} else {
|
||||
s.ExclusiveMaximum = c.numVal("exclusiveMaximum")
|
||||
}
|
||||
s.Minimum = c.numVal("minimum")
|
||||
if c.boolean("exclusiveMinimum") {
|
||||
s.ExclusiveMinimum = s.Minimum
|
||||
s.Minimum = nil
|
||||
} else {
|
||||
s.ExclusiveMinimum = c.numVal("exclusiveMinimum")
|
||||
}
|
||||
|
||||
s.MinLength = c.intVal("minLength")
|
||||
s.MaxLength = c.intVal("maxLength")
|
||||
if pat := c.strVal("pattern"); pat != nil {
|
||||
s.Pattern, err = c.c.roots.regexpEngine(*pat)
|
||||
if err != nil {
|
||||
return &InvalidRegexError{c.up.format("pattern"), *pat, err}
|
||||
}
|
||||
}
|
||||
|
||||
s.MinItems = c.intVal("minItems")
|
||||
s.MaxItems = c.intVal("maxItems")
|
||||
s.UniqueItems = c.boolean("uniqueItems")
|
||||
|
||||
s.MaxProperties = c.intVal("maxProperties")
|
||||
s.MinProperties = c.intVal("minProperties")
|
||||
if arr := c.arrVal("required"); arr != nil {
|
||||
s.Required = toStrings(arr)
|
||||
}
|
||||
}
|
||||
|
||||
// format --
|
||||
if c.assertFormat(s.DraftVersion) {
|
||||
if f := c.strVal("format"); f != nil {
|
||||
if *f == "regex" {
|
||||
s.Format = &Format{
|
||||
Name: "regex",
|
||||
Validate: c.c.roots.regexpEngine.validate,
|
||||
}
|
||||
} else {
|
||||
s.Format = c.c.formats[*f]
|
||||
if s.Format == nil {
|
||||
s.Format = formats[*f]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// annotations --
|
||||
s.Title = c.string("title")
|
||||
s.Description = c.string("description")
|
||||
if v, ok := c.obj["default"]; ok {
|
||||
s.Default = &v
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft6(s *Schema) error {
|
||||
if c.hasVocab("applicator") {
|
||||
s.Contains = c.enqueueProp("contains")
|
||||
s.PropertyNames = c.enqueueProp("propertyNames")
|
||||
}
|
||||
if c.hasVocab("validation") {
|
||||
if v, ok := c.obj["const"]; ok {
|
||||
s.Const = &v
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft7(s *Schema) error {
|
||||
if c.hasVocab("applicator") {
|
||||
s.If = c.enqueueProp("if")
|
||||
if s.If != nil {
|
||||
b := c.boolVal("if")
|
||||
if b == nil || *b {
|
||||
s.Then = c.enqueueProp("then")
|
||||
}
|
||||
if b == nil || !*b {
|
||||
s.Else = c.enqueueProp("else")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.c.assertContent {
|
||||
if ce := c.strVal("contentEncoding"); ce != nil {
|
||||
s.ContentEncoding = c.c.decoders[*ce]
|
||||
if s.ContentEncoding == nil {
|
||||
s.ContentEncoding = decoders[*ce]
|
||||
}
|
||||
}
|
||||
if cm := c.strVal("contentMediaType"); cm != nil {
|
||||
s.ContentMediaType = c.c.mediaTypes[*cm]
|
||||
if s.ContentMediaType == nil {
|
||||
s.ContentMediaType = mediaTypes[*cm]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// annotations --
|
||||
s.Comment = c.string("$comment")
|
||||
s.ReadOnly = c.boolean("readOnly")
|
||||
s.WriteOnly = c.boolean("writeOnly")
|
||||
if arr, ok := c.obj["examples"].([]any); ok {
|
||||
s.Examples = arr
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft2019(s *Schema) error {
|
||||
var err error
|
||||
|
||||
if c.hasVocab("core") {
|
||||
if s.RecursiveRef, err = c.enqueueRef("$recursiveRef"); err != nil {
|
||||
return err
|
||||
}
|
||||
s.RecursiveAnchor = c.boolean("$recursiveAnchor")
|
||||
}
|
||||
|
||||
if c.hasVocab("validation") {
|
||||
if s.Contains != nil {
|
||||
s.MinContains = c.intVal("minContains")
|
||||
s.MaxContains = c.intVal("maxContains")
|
||||
}
|
||||
if m := c.objVal("dependentRequired"); m != nil {
|
||||
s.DependentRequired = map[string][]string{}
|
||||
for pname, pvalue := range m {
|
||||
if arr, ok := pvalue.([]any); ok {
|
||||
s.DependentRequired[pname] = toStrings(arr)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if c.hasVocab("applicator") {
|
||||
s.DependentSchemas = c.enqueueMap("dependentSchemas")
|
||||
}
|
||||
|
||||
var unevaluated bool
|
||||
if s.DraftVersion == 2019 {
|
||||
unevaluated = c.hasVocab("applicator")
|
||||
} else {
|
||||
unevaluated = c.hasVocab("unevaluated")
|
||||
}
|
||||
if unevaluated {
|
||||
s.UnevaluatedItems = c.enqueueProp("unevaluatedItems")
|
||||
s.UnevaluatedProperties = c.enqueueProp("unevaluatedProperties")
|
||||
}
|
||||
|
||||
if c.c.assertContent {
|
||||
if s.ContentMediaType != nil && s.ContentMediaType.UnmarshalJSON != nil {
|
||||
s.ContentSchema = c.enqueueProp("contentSchema")
|
||||
}
|
||||
}
|
||||
|
||||
// annotations --
|
||||
s.Deprecated = c.boolean("deprecated")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) compileDraft2020(s *Schema) error {
|
||||
if c.hasVocab("core") {
|
||||
sch, err := c.enqueueRef("$dynamicRef")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if sch != nil {
|
||||
dref := c.strVal("$dynamicRef")
|
||||
_, frag, err := splitFragment(*dref)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var anch string
|
||||
if anchor, ok := frag.convert().(anchor); ok {
|
||||
anch = string(anchor)
|
||||
}
|
||||
s.DynamicRef = &DynamicRef{sch, anch}
|
||||
}
|
||||
s.DynamicAnchor = c.string("$dynamicAnchor")
|
||||
}
|
||||
|
||||
if c.hasVocab("applicator") {
|
||||
s.PrefixItems = c.enqueueArr("prefixItems")
|
||||
s.Items2020 = c.enqueueProp("items")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// enqueue helpers --
|
||||
|
||||
func (c *objCompiler) enqueuePtr(ptr jsonPointer) *Schema {
|
||||
up := urlPtr{c.up.url, ptr}
|
||||
return c.c.enqueue(c.q, up)
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueRef(pname string) (*Schema, error) {
|
||||
ref := c.strVal(pname)
|
||||
if ref == nil {
|
||||
return nil, nil
|
||||
}
|
||||
baseURL := c.res.id
|
||||
// baseURL := c.r.baseURL(c.up.ptr)
|
||||
uf, err := baseURL.join(*ref)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
up, err := c.r.resolve(*uf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if up != nil {
|
||||
// local ref
|
||||
return c.enqueuePtr(up.ptr), nil
|
||||
}
|
||||
|
||||
// remote ref
|
||||
up_, err := c.c.roots.resolveFragment(*uf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c.c.enqueue(c.q, up_), nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueProp(pname string) *Schema {
|
||||
if _, ok := c.obj[pname]; !ok {
|
||||
return nil
|
||||
}
|
||||
ptr := c.up.ptr.append(pname)
|
||||
return c.enqueuePtr(ptr)
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueArr(pname string) []*Schema {
|
||||
arr := c.arrVal(pname)
|
||||
if arr == nil {
|
||||
return nil
|
||||
}
|
||||
sch := make([]*Schema, len(arr))
|
||||
for i := range arr {
|
||||
ptr := c.up.ptr.append2(pname, strconv.Itoa(i))
|
||||
sch[i] = c.enqueuePtr(ptr)
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueMap(pname string) map[string]*Schema {
|
||||
obj := c.objVal(pname)
|
||||
if obj == nil {
|
||||
return nil
|
||||
}
|
||||
sch := make(map[string]*Schema)
|
||||
for k := range obj {
|
||||
ptr := c.up.ptr.append2(pname, k)
|
||||
sch[k] = c.enqueuePtr(ptr)
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (c *objCompiler) enqueueAdditional(pname string) any {
|
||||
if b := c.boolVal(pname); b != nil {
|
||||
return *b
|
||||
}
|
||||
if sch := c.enqueueProp(pname); sch != nil {
|
||||
return sch
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
func (c *objCompiler) hasVocab(name string) bool {
|
||||
return c.res.dialect.hasVocab(name)
|
||||
}
|
||||
|
||||
func (c *objCompiler) assertFormat(draftVersion int) bool {
|
||||
if c.c.assertFormat || draftVersion < 2019 {
|
||||
return true
|
||||
}
|
||||
if draftVersion == 2019 {
|
||||
return c.hasVocab("format")
|
||||
} else {
|
||||
return c.hasVocab("format-assertion")
|
||||
}
|
||||
}
|
||||
|
||||
// value helpers --
|
||||
|
||||
func (c *objCompiler) boolVal(pname string) *bool {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
b, ok := v.(bool)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return &b
|
||||
}
|
||||
|
||||
func (c *objCompiler) boolean(pname string) bool {
|
||||
b := c.boolVal(pname)
|
||||
return b != nil && *b
|
||||
}
|
||||
|
||||
func (c *objCompiler) strVal(pname string) *string {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return &s
|
||||
}
|
||||
|
||||
func (c *objCompiler) string(pname string) string {
|
||||
if s := c.strVal(pname); s != nil {
|
||||
return *s
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (c *objCompiler) numVal(pname string) *big.Rat {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
switch v.(type) {
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
if n, ok := new(big.Rat).SetString(fmt.Sprint(v)); ok {
|
||||
return n
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) intVal(pname string) *int {
|
||||
if n := c.numVal(pname); n != nil && n.IsInt() {
|
||||
n := int(n.Num().Int64())
|
||||
return &n
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *objCompiler) objVal(pname string) map[string]any {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return obj
|
||||
}
|
||||
|
||||
func (c *objCompiler) arrVal(pname string) []any {
|
||||
v, ok := c.obj[pname]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type InvalidRegexError struct {
|
||||
URL string
|
||||
Regex string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e *InvalidRegexError) Error() string {
|
||||
return fmt.Sprintf("invalid regex %q at %q: %v", e.Regex, e.URL, e.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
func toStrings(arr []any) []string {
|
||||
var strings []string
|
||||
for _, item := range arr {
|
||||
if s, ok := item.(string); ok {
|
||||
strings = append(strings, s)
|
||||
}
|
||||
}
|
||||
return strings
|
||||
}
|
||||
212
vendor/github.com/santhosh-tekuri/jsonschema/v6/output.go
generated
vendored
Normal file
212
vendor/github.com/santhosh-tekuri/jsonschema/v6/output.go
generated
vendored
Normal file
|
|
@ -0,0 +1,212 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/language"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
var defaultPrinter = message.NewPrinter(language.English)
|
||||
|
||||
// format ---
|
||||
|
||||
func (e *ValidationError) schemaURL() string {
|
||||
if ref, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
return ref.URL
|
||||
} else {
|
||||
return e.SchemaURL
|
||||
}
|
||||
}
|
||||
|
||||
func (e *ValidationError) absoluteKeywordLocation() string {
|
||||
var schemaURL string
|
||||
var keywordPath []string
|
||||
if ref, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
schemaURL = ref.URL
|
||||
keywordPath = nil
|
||||
} else {
|
||||
schemaURL = e.SchemaURL
|
||||
keywordPath = e.ErrorKind.KeywordPath()
|
||||
}
|
||||
return fmt.Sprintf("%s%s", schemaURL, encode(jsonPtr(keywordPath)))
|
||||
}
|
||||
|
||||
func (e *ValidationError) skip() bool {
|
||||
if len(e.Causes) == 1 {
|
||||
_, ok := e.ErrorKind.(*kind.Reference)
|
||||
return ok
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (e *ValidationError) display(sb *strings.Builder, verbose bool, indent int, absKwLoc string, p *message.Printer) {
|
||||
if !e.skip() {
|
||||
if indent > 0 {
|
||||
sb.WriteByte('\n')
|
||||
for i := 0; i < indent-1; i++ {
|
||||
sb.WriteString(" ")
|
||||
}
|
||||
sb.WriteString("- ")
|
||||
}
|
||||
indent = indent + 1
|
||||
|
||||
prevAbsKwLoc := absKwLoc
|
||||
absKwLoc = e.absoluteKeywordLocation()
|
||||
|
||||
if _, ok := e.ErrorKind.(*kind.Schema); ok {
|
||||
sb.WriteString(e.ErrorKind.LocalizedString(p))
|
||||
} else {
|
||||
sb.WriteString(p.Sprintf("at %s", quote(jsonPtr(e.InstanceLocation))))
|
||||
if verbose {
|
||||
schLoc := absKwLoc
|
||||
if prevAbsKwLoc != "" {
|
||||
pu, _ := split(prevAbsKwLoc)
|
||||
u, f := split(absKwLoc)
|
||||
if u == pu {
|
||||
schLoc = fmt.Sprintf("S#%s", f)
|
||||
}
|
||||
}
|
||||
fmt.Fprintf(sb, " [%s]", schLoc)
|
||||
}
|
||||
fmt.Fprintf(sb, ": %s", e.ErrorKind.LocalizedString(p))
|
||||
}
|
||||
}
|
||||
for _, cause := range e.Causes {
|
||||
cause.display(sb, verbose, indent, absKwLoc, p)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *ValidationError) Error() string {
|
||||
return e.LocalizedError(defaultPrinter)
|
||||
}
|
||||
|
||||
func (e *ValidationError) LocalizedError(p *message.Printer) string {
|
||||
var sb strings.Builder
|
||||
e.display(&sb, false, 0, "", p)
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (e *ValidationError) GoString() string {
|
||||
return e.LocalizedGoString(defaultPrinter)
|
||||
}
|
||||
|
||||
func (e *ValidationError) LocalizedGoString(p *message.Printer) string {
|
||||
var sb strings.Builder
|
||||
e.display(&sb, true, 0, "", p)
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func jsonPtr(tokens []string) string {
|
||||
var sb strings.Builder
|
||||
for _, tok := range tokens {
|
||||
sb.WriteByte('/')
|
||||
sb.WriteString(escape(tok))
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// Flag is output format with simple boolean property valid.
|
||||
type FlagOutput struct {
|
||||
Valid bool `json:"valid"`
|
||||
}
|
||||
|
||||
// The `Flag` output format, merely the boolean result.
|
||||
func (e *ValidationError) FlagOutput() *FlagOutput {
|
||||
return &FlagOutput{Valid: false}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type OutputUnit struct {
|
||||
Valid bool `json:"valid"`
|
||||
KeywordLocation string `json:"keywordLocation"`
|
||||
AbsoluteKeywordLocation string `json:"AbsoluteKeywordLocation,omitempty"`
|
||||
InstanceLocation string `json:"instanceLocation"`
|
||||
Error *OutputError `json:"error,omitempty"`
|
||||
Errors []OutputUnit `json:"errors,omitempty"`
|
||||
}
|
||||
|
||||
type OutputError struct {
|
||||
Kind ErrorKind
|
||||
p *message.Printer
|
||||
}
|
||||
|
||||
func (k OutputError) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(k.Kind.LocalizedString(k.p))
|
||||
}
|
||||
|
||||
// The `Basic` structure, a flat list of output units.
|
||||
func (e *ValidationError) BasicOutput() *OutputUnit {
|
||||
return e.LocalizedBasicOutput(defaultPrinter)
|
||||
}
|
||||
|
||||
func (e *ValidationError) LocalizedBasicOutput(p *message.Printer) *OutputUnit {
|
||||
out := e.output(true, false, "", "", p)
|
||||
return &out
|
||||
}
|
||||
|
||||
// The `Detailed` structure, based on the schema.
|
||||
func (e *ValidationError) DetailedOutput() *OutputUnit {
|
||||
return e.LocalizedDetailedOutput(defaultPrinter)
|
||||
}
|
||||
|
||||
func (e *ValidationError) LocalizedDetailedOutput(p *message.Printer) *OutputUnit {
|
||||
out := e.output(false, false, "", "", p)
|
||||
return &out
|
||||
}
|
||||
|
||||
func (e *ValidationError) output(flatten, inRef bool, schemaURL, kwLoc string, p *message.Printer) OutputUnit {
|
||||
if !inRef {
|
||||
if _, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
inRef = true
|
||||
}
|
||||
}
|
||||
if schemaURL != "" {
|
||||
kwLoc += e.SchemaURL[len(schemaURL):]
|
||||
if ref, ok := e.ErrorKind.(*kind.Reference); ok {
|
||||
kwLoc += jsonPtr(ref.KeywordPath())
|
||||
}
|
||||
}
|
||||
schemaURL = e.schemaURL()
|
||||
|
||||
keywordLocation := kwLoc
|
||||
if _, ok := e.ErrorKind.(*kind.Reference); !ok {
|
||||
keywordLocation += jsonPtr(e.ErrorKind.KeywordPath())
|
||||
}
|
||||
|
||||
out := OutputUnit{
|
||||
Valid: false,
|
||||
InstanceLocation: jsonPtr(e.InstanceLocation),
|
||||
KeywordLocation: keywordLocation,
|
||||
}
|
||||
if inRef {
|
||||
out.AbsoluteKeywordLocation = e.absoluteKeywordLocation()
|
||||
}
|
||||
for _, cause := range e.Causes {
|
||||
causeOut := cause.output(flatten, inRef, schemaURL, kwLoc, p)
|
||||
if cause.skip() {
|
||||
causeOut = causeOut.Errors[0]
|
||||
}
|
||||
if flatten {
|
||||
errors := causeOut.Errors
|
||||
causeOut.Errors = nil
|
||||
causeOut.Error = &OutputError{cause.ErrorKind, p}
|
||||
out.Errors = append(out.Errors, causeOut)
|
||||
if len(errors) > 0 {
|
||||
out.Errors = append(out.Errors, errors...)
|
||||
}
|
||||
} else {
|
||||
out.Errors = append(out.Errors, causeOut)
|
||||
}
|
||||
}
|
||||
if len(out.Errors) == 0 {
|
||||
out.Error = &OutputError{e.ErrorKind, p}
|
||||
}
|
||||
return out
|
||||
}
|
||||
142
vendor/github.com/santhosh-tekuri/jsonschema/v6/position.go
generated
vendored
Normal file
142
vendor/github.com/santhosh-tekuri/jsonschema/v6/position.go
generated
vendored
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Position tells possible tokens in json.
|
||||
type Position interface {
|
||||
collect(v any, ptr jsonPointer) map[jsonPointer]any
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AllProp struct{}
|
||||
|
||||
func (AllProp) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
m := map[jsonPointer]any{}
|
||||
for pname, pvalue := range obj {
|
||||
m[ptr.append(pname)] = pvalue
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AllItem struct{}
|
||||
|
||||
func (AllItem) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
m := map[jsonPointer]any{}
|
||||
for i, item := range arr {
|
||||
m[ptr.append(strconv.Itoa(i))] = item
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Prop string
|
||||
|
||||
func (p Prop) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
obj, ok := v.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
pvalue, ok := obj[string(p)]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return map[jsonPointer]any{
|
||||
ptr.append(string(p)): pvalue,
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Item int
|
||||
|
||||
func (i Item) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
arr, ok := v.([]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if i < 0 || int(i) >= len(arr) {
|
||||
return nil
|
||||
}
|
||||
return map[jsonPointer]any{
|
||||
ptr.append(strconv.Itoa(int(i))): arr[int(i)],
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemaPath tells where to look for subschema inside keyword.
|
||||
type SchemaPath []Position
|
||||
|
||||
func schemaPath(path string) SchemaPath {
|
||||
var sp SchemaPath
|
||||
for _, tok := range strings.Split(path, "/") {
|
||||
var pos Position
|
||||
switch tok {
|
||||
case "*":
|
||||
pos = AllProp{}
|
||||
case "[]":
|
||||
pos = AllItem{}
|
||||
default:
|
||||
if i, err := strconv.Atoi(tok); err == nil {
|
||||
pos = Item(i)
|
||||
} else {
|
||||
pos = Prop(tok)
|
||||
}
|
||||
}
|
||||
sp = append(sp, pos)
|
||||
}
|
||||
return sp
|
||||
}
|
||||
|
||||
func (sp SchemaPath) collect(v any, ptr jsonPointer) map[jsonPointer]any {
|
||||
if len(sp) == 0 {
|
||||
return map[jsonPointer]any{
|
||||
ptr: v,
|
||||
}
|
||||
}
|
||||
p, sp := sp[0], sp[1:]
|
||||
m := p.collect(v, ptr)
|
||||
mm := map[jsonPointer]any{}
|
||||
for ptr, v := range m {
|
||||
m = sp.collect(v, ptr)
|
||||
for k, v := range m {
|
||||
mm[k] = v
|
||||
}
|
||||
}
|
||||
return mm
|
||||
}
|
||||
|
||||
func (sp SchemaPath) String() string {
|
||||
var sb strings.Builder
|
||||
for _, pos := range sp {
|
||||
if sb.Len() != 0 {
|
||||
sb.WriteByte('/')
|
||||
}
|
||||
switch pos := pos.(type) {
|
||||
case AllProp:
|
||||
sb.WriteString("*")
|
||||
case AllItem:
|
||||
sb.WriteString("[]")
|
||||
case Prop:
|
||||
sb.WriteString(string(pos))
|
||||
case Item:
|
||||
sb.WriteString(strconv.Itoa(int(pos)))
|
||||
}
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
202
vendor/github.com/santhosh-tekuri/jsonschema/v6/root.go
generated
vendored
Normal file
202
vendor/github.com/santhosh-tekuri/jsonschema/v6/root.go
generated
vendored
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type root struct {
|
||||
url url
|
||||
doc any
|
||||
resources map[jsonPointer]*resource
|
||||
subschemasProcessed map[jsonPointer]struct{}
|
||||
}
|
||||
|
||||
func (r *root) rootResource() *resource {
|
||||
return r.resources[""]
|
||||
}
|
||||
|
||||
func (r *root) resource(ptr jsonPointer) *resource {
|
||||
for {
|
||||
if res, ok := r.resources[ptr]; ok {
|
||||
return res
|
||||
}
|
||||
slash := strings.LastIndexByte(string(ptr), '/')
|
||||
if slash == -1 {
|
||||
break
|
||||
}
|
||||
ptr = ptr[:slash]
|
||||
}
|
||||
return r.rootResource()
|
||||
}
|
||||
|
||||
func (r *root) resolveFragmentIn(frag fragment, res *resource) (urlPtr, error) {
|
||||
var ptr jsonPointer
|
||||
switch f := frag.convert().(type) {
|
||||
case jsonPointer:
|
||||
ptr = res.ptr.concat(f)
|
||||
case anchor:
|
||||
aptr, ok := res.anchors[f]
|
||||
if !ok {
|
||||
return urlPtr{}, &AnchorNotFoundError{
|
||||
URL: r.url.String(),
|
||||
Reference: (&urlFrag{res.id, frag}).String(),
|
||||
}
|
||||
}
|
||||
ptr = aptr
|
||||
}
|
||||
return urlPtr{r.url, ptr}, nil
|
||||
}
|
||||
|
||||
func (r *root) resolveFragment(frag fragment) (urlPtr, error) {
|
||||
return r.resolveFragmentIn(frag, r.rootResource())
|
||||
}
|
||||
|
||||
// resovles urlFrag to urlPtr from root.
|
||||
// returns nil if it is external.
|
||||
func (r *root) resolve(uf urlFrag) (*urlPtr, error) {
|
||||
var res *resource
|
||||
if uf.url == r.url {
|
||||
res = r.rootResource()
|
||||
} else {
|
||||
// look for resource with id==uf.url
|
||||
for _, v := range r.resources {
|
||||
if v.id == uf.url {
|
||||
res = v
|
||||
break
|
||||
}
|
||||
}
|
||||
if res == nil {
|
||||
return nil, nil // external url
|
||||
}
|
||||
}
|
||||
up, err := r.resolveFragmentIn(uf.frag, res)
|
||||
return &up, err
|
||||
}
|
||||
|
||||
func (r *root) collectAnchors(sch any, schPtr jsonPointer, res *resource) error {
|
||||
obj, ok := sch.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
addAnchor := func(anchor anchor) error {
|
||||
ptr1, ok := res.anchors[anchor]
|
||||
if ok {
|
||||
if ptr1 == schPtr {
|
||||
// anchor with same root_ptr already exists
|
||||
return nil
|
||||
}
|
||||
return &DuplicateAnchorError{
|
||||
string(anchor), r.url.String(), string(ptr1), string(schPtr),
|
||||
}
|
||||
}
|
||||
res.anchors[anchor] = schPtr
|
||||
return nil
|
||||
}
|
||||
|
||||
if res.dialect.draft.version < 2019 {
|
||||
if _, ok := obj["$ref"]; ok {
|
||||
// All other properties in a "$ref" object MUST be ignored
|
||||
return nil
|
||||
}
|
||||
// anchor is specified in id
|
||||
if id, ok := strVal(obj, res.dialect.draft.id); ok {
|
||||
_, frag, err := splitFragment(id)
|
||||
if err != nil {
|
||||
loc := urlPtr{r.url, schPtr}
|
||||
return &ParseAnchorError{loc.String()}
|
||||
}
|
||||
if anchor, ok := frag.convert().(anchor); ok {
|
||||
if err := addAnchor(anchor); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if res.dialect.draft.version >= 2019 {
|
||||
if s, ok := strVal(obj, "$anchor"); ok {
|
||||
if err := addAnchor(anchor(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
if res.dialect.draft.version >= 2020 {
|
||||
if s, ok := strVal(obj, "$dynamicAnchor"); ok {
|
||||
if err := addAnchor(anchor(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
res.dynamicAnchors = append(res.dynamicAnchors, anchor(s))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *root) clone() *root {
|
||||
processed := map[jsonPointer]struct{}{}
|
||||
for k := range r.subschemasProcessed {
|
||||
processed[k] = struct{}{}
|
||||
}
|
||||
resources := map[jsonPointer]*resource{}
|
||||
for k, v := range r.resources {
|
||||
resources[k] = v.clone()
|
||||
}
|
||||
return &root{
|
||||
url: r.url,
|
||||
doc: r.doc,
|
||||
resources: resources,
|
||||
subschemasProcessed: processed,
|
||||
}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type resource struct {
|
||||
ptr jsonPointer
|
||||
id url
|
||||
dialect dialect
|
||||
anchors map[anchor]jsonPointer
|
||||
dynamicAnchors []anchor
|
||||
}
|
||||
|
||||
func newResource(ptr jsonPointer, id url) *resource {
|
||||
return &resource{ptr: ptr, id: id, anchors: make(map[anchor]jsonPointer)}
|
||||
}
|
||||
|
||||
func (res *resource) clone() *resource {
|
||||
anchors := map[anchor]jsonPointer{}
|
||||
for k, v := range res.anchors {
|
||||
anchors[k] = v
|
||||
}
|
||||
return &resource{
|
||||
ptr: res.ptr,
|
||||
id: res.id,
|
||||
dialect: res.dialect,
|
||||
anchors: anchors,
|
||||
dynamicAnchors: slices.Clone(res.dynamicAnchors),
|
||||
}
|
||||
}
|
||||
|
||||
//--
|
||||
|
||||
type UnsupportedVocabularyError struct {
|
||||
URL string
|
||||
Vocabulary string
|
||||
}
|
||||
|
||||
func (e *UnsupportedVocabularyError) Error() string {
|
||||
return fmt.Sprintf("unsupported vocabulary %q in %q", e.Vocabulary, e.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type AnchorNotFoundError struct {
|
||||
URL string
|
||||
Reference string
|
||||
}
|
||||
|
||||
func (e *AnchorNotFoundError) Error() string {
|
||||
return fmt.Sprintf("anchor in %q not found in schema %q", e.Reference, e.URL)
|
||||
}
|
||||
289
vendor/github.com/santhosh-tekuri/jsonschema/v6/roots.go
generated
vendored
Normal file
289
vendor/github.com/santhosh-tekuri/jsonschema/v6/roots.go
generated
vendored
Normal file
|
|
@ -0,0 +1,289 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type roots struct {
|
||||
defaultDraft *Draft
|
||||
roots map[url]*root
|
||||
loader defaultLoader
|
||||
regexpEngine RegexpEngine
|
||||
vocabularies map[string]*Vocabulary
|
||||
assertVocabs bool
|
||||
}
|
||||
|
||||
func newRoots() *roots {
|
||||
return &roots{
|
||||
defaultDraft: draftLatest,
|
||||
roots: map[url]*root{},
|
||||
loader: defaultLoader{
|
||||
docs: map[url]any{},
|
||||
loader: FileLoader{},
|
||||
},
|
||||
regexpEngine: goRegexpCompile,
|
||||
vocabularies: map[string]*Vocabulary{},
|
||||
}
|
||||
}
|
||||
|
||||
func (rr *roots) orLoad(u url) (*root, error) {
|
||||
if r, ok := rr.roots[u]; ok {
|
||||
return r, nil
|
||||
}
|
||||
doc, err := rr.loader.load(u)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return rr.addRoot(u, doc)
|
||||
}
|
||||
|
||||
func (rr *roots) addRoot(u url, doc any) (*root, error) {
|
||||
r := &root{
|
||||
url: u,
|
||||
doc: doc,
|
||||
resources: map[jsonPointer]*resource{},
|
||||
subschemasProcessed: map[jsonPointer]struct{}{},
|
||||
}
|
||||
if err := rr.collectResources(r, doc, u, "", dialect{rr.defaultDraft, nil}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !strings.HasPrefix(u.String(), "http://json-schema.org/") &&
|
||||
!strings.HasPrefix(u.String(), "https://json-schema.org/") {
|
||||
if err := rr.validate(r, doc, ""); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
rr.roots[u] = r
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func (rr *roots) resolveFragment(uf urlFrag) (urlPtr, error) {
|
||||
r, err := rr.orLoad(uf.url)
|
||||
if err != nil {
|
||||
return urlPtr{}, err
|
||||
}
|
||||
return r.resolveFragment(uf.frag)
|
||||
}
|
||||
|
||||
func (rr *roots) collectResources(r *root, sch any, base url, schPtr jsonPointer, fallback dialect) error {
|
||||
if _, ok := r.subschemasProcessed[schPtr]; ok {
|
||||
return nil
|
||||
}
|
||||
if err := rr._collectResources(r, sch, base, schPtr, fallback); err != nil {
|
||||
return err
|
||||
}
|
||||
r.subschemasProcessed[schPtr] = struct{}{}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rr *roots) _collectResources(r *root, sch any, base url, schPtr jsonPointer, fallback dialect) error {
|
||||
if _, ok := sch.(bool); ok {
|
||||
if schPtr.isEmpty() {
|
||||
// root resource
|
||||
res := newResource(schPtr, base)
|
||||
res.dialect = fallback
|
||||
r.resources[schPtr] = res
|
||||
}
|
||||
return nil
|
||||
}
|
||||
obj, ok := sch.(map[string]any)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
hasSchema := false
|
||||
if sch, ok := obj["$schema"]; ok {
|
||||
if _, ok := sch.(string); ok {
|
||||
hasSchema = true
|
||||
}
|
||||
}
|
||||
|
||||
draft, err := rr.loader.getDraft(urlPtr{r.url, schPtr}, sch, fallback.draft, map[url]struct{}{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
id := draft.getID(obj)
|
||||
if id == "" && !schPtr.isEmpty() {
|
||||
// ignore $schema
|
||||
draft = fallback.draft
|
||||
hasSchema = false
|
||||
id = draft.getID(obj)
|
||||
}
|
||||
|
||||
var res *resource
|
||||
if id != "" {
|
||||
uf, err := base.join(id)
|
||||
if err != nil {
|
||||
loc := urlPtr{r.url, schPtr}
|
||||
return &ParseIDError{loc.String()}
|
||||
}
|
||||
base = uf.url
|
||||
res = newResource(schPtr, base)
|
||||
} else if schPtr.isEmpty() {
|
||||
// root resource
|
||||
res = newResource(schPtr, base)
|
||||
}
|
||||
|
||||
if res != nil {
|
||||
found := false
|
||||
for _, res := range r.resources {
|
||||
if res.id == base {
|
||||
found = true
|
||||
if res.ptr != schPtr {
|
||||
return &DuplicateIDError{base.String(), r.url.String(), string(schPtr), string(res.ptr)}
|
||||
}
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
if hasSchema {
|
||||
vocabs, err := rr.loader.getMetaVocabs(sch, draft, rr.vocabularies)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res.dialect = dialect{draft, vocabs}
|
||||
} else {
|
||||
res.dialect = fallback
|
||||
}
|
||||
r.resources[schPtr] = res
|
||||
}
|
||||
}
|
||||
|
||||
var baseRes *resource
|
||||
for _, res := range r.resources {
|
||||
if res.id == base {
|
||||
baseRes = res
|
||||
break
|
||||
}
|
||||
}
|
||||
if baseRes == nil {
|
||||
panic("baseres is nil")
|
||||
}
|
||||
|
||||
// found base resource
|
||||
if err := r.collectAnchors(sch, schPtr, baseRes); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// process subschemas
|
||||
subschemas := map[jsonPointer]any{}
|
||||
for _, sp := range draft.subschemas {
|
||||
ss := sp.collect(obj, schPtr)
|
||||
for k, v := range ss {
|
||||
subschemas[k] = v
|
||||
}
|
||||
}
|
||||
for _, vocab := range baseRes.dialect.activeVocabs(true, rr.vocabularies) {
|
||||
if v := rr.vocabularies[vocab]; v != nil {
|
||||
for _, sp := range v.Subschemas {
|
||||
ss := sp.collect(obj, schPtr)
|
||||
for k, v := range ss {
|
||||
subschemas[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for ptr, v := range subschemas {
|
||||
if err := rr.collectResources(r, v, base, ptr, baseRes.dialect); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rr *roots) ensureSubschema(up urlPtr) error {
|
||||
r, err := rr.orLoad(up.url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if _, ok := r.subschemasProcessed[up.ptr]; ok {
|
||||
return nil
|
||||
}
|
||||
v, err := up.lookup(r.doc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rClone := r.clone()
|
||||
if err := rr.addSubschema(rClone, up.ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := rr.validate(rClone, v, up.ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
rr.roots[r.url] = rClone
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rr *roots) addSubschema(r *root, ptr jsonPointer) error {
|
||||
v, err := (&urlPtr{r.url, ptr}).lookup(r.doc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
base := r.resource(ptr)
|
||||
baseURL := base.id
|
||||
if err := rr.collectResources(r, v, baseURL, ptr, base.dialect); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// collect anchors
|
||||
if _, ok := r.resources[ptr]; !ok {
|
||||
res := r.resource(ptr)
|
||||
if err := r.collectAnchors(v, ptr, res); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rr *roots) validate(r *root, v any, ptr jsonPointer) error {
|
||||
dialect := r.resource(ptr).dialect
|
||||
meta := dialect.getSchema(rr.assertVocabs, rr.vocabularies)
|
||||
if err := meta.validate(v, rr.regexpEngine, meta, r.resources, rr.assertVocabs, rr.vocabularies); err != nil {
|
||||
up := urlPtr{r.url, ptr}
|
||||
return &SchemaValidationError{URL: up.String(), Err: err}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type InvalidMetaSchemaURLError struct {
|
||||
URL string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e *InvalidMetaSchemaURLError) Error() string {
|
||||
return fmt.Sprintf("invalid $schema in %q: %v", e.URL, e.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type UnsupportedDraftError struct {
|
||||
URL string
|
||||
}
|
||||
|
||||
func (e *UnsupportedDraftError) Error() string {
|
||||
return fmt.Sprintf("draft %q is not supported", e.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MetaSchemaCycleError struct {
|
||||
URL string
|
||||
}
|
||||
|
||||
func (e *MetaSchemaCycleError) Error() string {
|
||||
return fmt.Sprintf("cycle in resolving $schema in %q", e.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type MetaSchemaMismatchError struct {
|
||||
URL string
|
||||
}
|
||||
|
||||
func (e *MetaSchemaMismatchError) Error() string {
|
||||
return fmt.Sprintf("$schema in %q does not match with $schema in root", e.URL)
|
||||
}
|
||||
248
vendor/github.com/santhosh-tekuri/jsonschema/v6/schema.go
generated
vendored
Normal file
248
vendor/github.com/santhosh-tekuri/jsonschema/v6/schema.go
generated
vendored
Normal file
|
|
@ -0,0 +1,248 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
)
|
||||
|
||||
// Schema is the regpresentation of a compiled
|
||||
// jsonschema.
|
||||
type Schema struct {
|
||||
up urlPtr
|
||||
resource *Schema
|
||||
dynamicAnchors map[string]*Schema
|
||||
allPropsEvaluated bool
|
||||
allItemsEvaluated bool
|
||||
numItemsEvaluated int
|
||||
|
||||
DraftVersion int
|
||||
Location string
|
||||
|
||||
// type agnostic --
|
||||
Bool *bool // boolean schema
|
||||
ID string
|
||||
Ref *Schema
|
||||
Anchor string
|
||||
RecursiveRef *Schema
|
||||
RecursiveAnchor bool
|
||||
DynamicRef *DynamicRef
|
||||
DynamicAnchor string // "" if not specified
|
||||
Types *Types
|
||||
Enum *Enum
|
||||
Const *any
|
||||
Not *Schema
|
||||
AllOf []*Schema
|
||||
AnyOf []*Schema
|
||||
OneOf []*Schema
|
||||
If *Schema
|
||||
Then *Schema
|
||||
Else *Schema
|
||||
Format *Format
|
||||
|
||||
// object --
|
||||
MaxProperties *int
|
||||
MinProperties *int
|
||||
Required []string
|
||||
PropertyNames *Schema
|
||||
Properties map[string]*Schema
|
||||
PatternProperties map[Regexp]*Schema
|
||||
AdditionalProperties any // nil or bool or *Schema
|
||||
Dependencies map[string]any // value is []string or *Schema
|
||||
DependentRequired map[string][]string
|
||||
DependentSchemas map[string]*Schema
|
||||
UnevaluatedProperties *Schema
|
||||
|
||||
// array --
|
||||
MinItems *int
|
||||
MaxItems *int
|
||||
UniqueItems bool
|
||||
Contains *Schema
|
||||
MinContains *int
|
||||
MaxContains *int
|
||||
Items any // nil or []*Schema or *Schema
|
||||
AdditionalItems any // nil or bool or *Schema
|
||||
PrefixItems []*Schema
|
||||
Items2020 *Schema
|
||||
UnevaluatedItems *Schema
|
||||
|
||||
// string --
|
||||
MinLength *int
|
||||
MaxLength *int
|
||||
Pattern Regexp
|
||||
ContentEncoding *Decoder
|
||||
ContentMediaType *MediaType
|
||||
ContentSchema *Schema
|
||||
|
||||
// number --
|
||||
Maximum *big.Rat
|
||||
Minimum *big.Rat
|
||||
ExclusiveMaximum *big.Rat
|
||||
ExclusiveMinimum *big.Rat
|
||||
MultipleOf *big.Rat
|
||||
|
||||
Extensions []SchemaExt
|
||||
|
||||
// annotations --
|
||||
Title string
|
||||
Description string
|
||||
Default *any
|
||||
Comment string
|
||||
ReadOnly bool
|
||||
WriteOnly bool
|
||||
Examples []any
|
||||
Deprecated bool
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type jsonType int
|
||||
|
||||
const (
|
||||
invalidType jsonType = 0
|
||||
nullType jsonType = 1 << iota
|
||||
booleanType
|
||||
numberType
|
||||
integerType
|
||||
stringType
|
||||
arrayType
|
||||
objectType
|
||||
)
|
||||
|
||||
func typeOf(v any) jsonType {
|
||||
switch v.(type) {
|
||||
case nil:
|
||||
return nullType
|
||||
case bool:
|
||||
return booleanType
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
return numberType
|
||||
case string:
|
||||
return stringType
|
||||
case []any:
|
||||
return arrayType
|
||||
case map[string]any:
|
||||
return objectType
|
||||
default:
|
||||
return invalidType
|
||||
}
|
||||
}
|
||||
|
||||
func typeFromString(s string) jsonType {
|
||||
switch s {
|
||||
case "null":
|
||||
return nullType
|
||||
case "boolean":
|
||||
return booleanType
|
||||
case "number":
|
||||
return numberType
|
||||
case "integer":
|
||||
return integerType
|
||||
case "string":
|
||||
return stringType
|
||||
case "array":
|
||||
return arrayType
|
||||
case "object":
|
||||
return objectType
|
||||
}
|
||||
return invalidType
|
||||
}
|
||||
|
||||
func (jt jsonType) String() string {
|
||||
switch jt {
|
||||
case nullType:
|
||||
return "null"
|
||||
case booleanType:
|
||||
return "boolean"
|
||||
case numberType:
|
||||
return "number"
|
||||
case integerType:
|
||||
return "integer"
|
||||
case stringType:
|
||||
return "string"
|
||||
case arrayType:
|
||||
return "array"
|
||||
case objectType:
|
||||
return "object"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// Types encapsulates list of json value types.
|
||||
type Types int
|
||||
|
||||
func newTypes(v any) *Types {
|
||||
var types Types
|
||||
switch v := v.(type) {
|
||||
case string:
|
||||
types.add(typeFromString(v))
|
||||
case []any:
|
||||
for _, item := range v {
|
||||
if s, ok := item.(string); ok {
|
||||
types.add(typeFromString(s))
|
||||
}
|
||||
}
|
||||
}
|
||||
if types.IsEmpty() {
|
||||
return nil
|
||||
}
|
||||
return &types
|
||||
}
|
||||
|
||||
func (tt Types) IsEmpty() bool {
|
||||
return tt == 0
|
||||
}
|
||||
|
||||
func (tt *Types) add(t jsonType) {
|
||||
*tt = Types(int(*tt) | int(t))
|
||||
}
|
||||
|
||||
func (tt Types) contains(t jsonType) bool {
|
||||
return int(tt)&int(t) != 0
|
||||
}
|
||||
|
||||
func (tt Types) ToStrings() []string {
|
||||
types := []jsonType{
|
||||
nullType, booleanType, numberType, integerType,
|
||||
stringType, arrayType, objectType,
|
||||
}
|
||||
var arr []string
|
||||
for _, t := range types {
|
||||
if tt.contains(t) {
|
||||
arr = append(arr, t.String())
|
||||
}
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
func (tt Types) String() string {
|
||||
return fmt.Sprintf("%v", tt.ToStrings())
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type Enum struct {
|
||||
Values []any
|
||||
types Types
|
||||
}
|
||||
|
||||
func newEnum(arr []any) *Enum {
|
||||
var types Types
|
||||
for _, item := range arr {
|
||||
types.add(typeOf(item))
|
||||
}
|
||||
return &Enum{arr, types}
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type DynamicRef struct {
|
||||
Ref *Schema
|
||||
Anchor string // "" if not specified
|
||||
}
|
||||
|
||||
func newSchema(up urlPtr) *Schema {
|
||||
return &Schema{up: up, Location: up.String()}
|
||||
}
|
||||
464
vendor/github.com/santhosh-tekuri/jsonschema/v6/util.go
generated
vendored
Normal file
464
vendor/github.com/santhosh-tekuri/jsonschema/v6/util.go
generated
vendored
Normal file
|
|
@ -0,0 +1,464 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"math/big"
|
||||
gourl "net/url"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
// --
|
||||
|
||||
type url (string)
|
||||
|
||||
func (u url) String() string {
|
||||
return string(u)
|
||||
}
|
||||
|
||||
func (u url) join(ref string) (*urlFrag, error) {
|
||||
base, err := gourl.Parse(string(u))
|
||||
if err != nil {
|
||||
return nil, &ParseURLError{URL: u.String(), Err: err}
|
||||
}
|
||||
|
||||
ref, frag, err := splitFragment(ref)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
refURL, err := gourl.Parse(ref)
|
||||
if err != nil {
|
||||
return nil, &ParseURLError{URL: ref, Err: err}
|
||||
}
|
||||
resolved := base.ResolveReference(refURL)
|
||||
|
||||
// see https://github.com/golang/go/issues/66084 (net/url: ResolveReference ignores Opaque value)
|
||||
if !refURL.IsAbs() && base.Opaque != "" {
|
||||
resolved.Opaque = base.Opaque
|
||||
}
|
||||
|
||||
return &urlFrag{url: url(resolved.String()), frag: frag}, nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type jsonPointer string
|
||||
|
||||
func escape(tok string) string {
|
||||
tok = strings.ReplaceAll(tok, "~", "~0")
|
||||
tok = strings.ReplaceAll(tok, "/", "~1")
|
||||
return tok
|
||||
}
|
||||
|
||||
func unescape(tok string) (string, bool) {
|
||||
tilde := strings.IndexByte(tok, '~')
|
||||
if tilde == -1 {
|
||||
return tok, true
|
||||
}
|
||||
sb := new(strings.Builder)
|
||||
for {
|
||||
sb.WriteString(tok[:tilde])
|
||||
tok = tok[tilde+1:]
|
||||
if tok == "" {
|
||||
return "", false
|
||||
}
|
||||
switch tok[0] {
|
||||
case '0':
|
||||
sb.WriteByte('~')
|
||||
case '1':
|
||||
sb.WriteByte('/')
|
||||
default:
|
||||
return "", false
|
||||
}
|
||||
tok = tok[1:]
|
||||
tilde = strings.IndexByte(tok, '~')
|
||||
if tilde == -1 {
|
||||
sb.WriteString(tok)
|
||||
break
|
||||
}
|
||||
}
|
||||
return sb.String(), true
|
||||
}
|
||||
|
||||
func (ptr jsonPointer) isEmpty() bool {
|
||||
return string(ptr) == ""
|
||||
}
|
||||
|
||||
func (ptr jsonPointer) concat(next jsonPointer) jsonPointer {
|
||||
return jsonPointer(fmt.Sprintf("%s%s", ptr, next))
|
||||
}
|
||||
|
||||
func (ptr jsonPointer) append(tok string) jsonPointer {
|
||||
return jsonPointer(fmt.Sprintf("%s/%s", ptr, escape(tok)))
|
||||
}
|
||||
|
||||
func (ptr jsonPointer) append2(tok1, tok2 string) jsonPointer {
|
||||
return jsonPointer(fmt.Sprintf("%s/%s/%s", ptr, escape(tok1), escape(tok2)))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type anchor string
|
||||
|
||||
// --
|
||||
|
||||
type fragment string
|
||||
|
||||
func decode(frag string) (string, error) {
|
||||
return gourl.PathUnescape(frag)
|
||||
}
|
||||
|
||||
// avoids escaping /.
|
||||
func encode(frag string) string {
|
||||
var sb strings.Builder
|
||||
for i, tok := range strings.Split(frag, "/") {
|
||||
if i > 0 {
|
||||
sb.WriteByte('/')
|
||||
}
|
||||
sb.WriteString(gourl.PathEscape(tok))
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func splitFragment(str string) (string, fragment, error) {
|
||||
u, f := split(str)
|
||||
f, err := decode(f)
|
||||
if err != nil {
|
||||
return "", fragment(""), &ParseURLError{URL: str, Err: err}
|
||||
}
|
||||
return u, fragment(f), nil
|
||||
}
|
||||
|
||||
func split(str string) (string, string) {
|
||||
hash := strings.IndexByte(str, '#')
|
||||
if hash == -1 {
|
||||
return str, ""
|
||||
}
|
||||
return str[:hash], str[hash+1:]
|
||||
}
|
||||
|
||||
func (frag fragment) convert() any {
|
||||
str := string(frag)
|
||||
if str == "" || strings.HasPrefix(str, "/") {
|
||||
return jsonPointer(str)
|
||||
}
|
||||
return anchor(str)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type urlFrag struct {
|
||||
url url
|
||||
frag fragment
|
||||
}
|
||||
|
||||
func startsWithWindowsDrive(s string) bool {
|
||||
if s != "" && strings.HasPrefix(s[1:], `:\`) {
|
||||
return (s[0] >= 'a' && s[0] <= 'z') || (s[0] >= 'A' && s[0] <= 'Z')
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func absolute(input string) (*urlFrag, error) {
|
||||
u, frag, err := splitFragment(input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// if windows absolute file path, convert to file url
|
||||
// because: net/url parses driver name as scheme
|
||||
if runtime.GOOS == "windows" && startsWithWindowsDrive(u) {
|
||||
u = "file:///" + filepath.ToSlash(u)
|
||||
}
|
||||
|
||||
gourl, err := gourl.Parse(u)
|
||||
if err != nil {
|
||||
return nil, &ParseURLError{URL: input, Err: err}
|
||||
}
|
||||
if gourl.IsAbs() {
|
||||
return &urlFrag{url(u), frag}, nil
|
||||
}
|
||||
|
||||
// avoid filesystem api in wasm
|
||||
if runtime.GOOS != "js" {
|
||||
abs, err := filepath.Abs(u)
|
||||
if err != nil {
|
||||
return nil, &ParseURLError{URL: input, Err: err}
|
||||
}
|
||||
u = abs
|
||||
}
|
||||
if !strings.HasPrefix(u, "/") {
|
||||
u = "/" + u
|
||||
}
|
||||
u = "file://" + filepath.ToSlash(u)
|
||||
|
||||
_, err = gourl.Parse(u)
|
||||
if err != nil {
|
||||
return nil, &ParseURLError{URL: input, Err: err}
|
||||
}
|
||||
return &urlFrag{url: url(u), frag: frag}, nil
|
||||
}
|
||||
|
||||
func (uf *urlFrag) String() string {
|
||||
return fmt.Sprintf("%s#%s", uf.url, encode(string(uf.frag)))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type urlPtr struct {
|
||||
url url
|
||||
ptr jsonPointer
|
||||
}
|
||||
|
||||
func (up *urlPtr) lookup(v any) (any, error) {
|
||||
for _, tok := range strings.Split(string(up.ptr), "/")[1:] {
|
||||
tok, ok := unescape(tok)
|
||||
if !ok {
|
||||
return nil, &InvalidJsonPointerError{up.String()}
|
||||
}
|
||||
switch val := v.(type) {
|
||||
case map[string]any:
|
||||
if pvalue, ok := val[tok]; ok {
|
||||
v = pvalue
|
||||
continue
|
||||
}
|
||||
case []any:
|
||||
if index, err := strconv.Atoi(tok); err == nil {
|
||||
if index >= 0 && index < len(val) {
|
||||
v = val[index]
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, &JSONPointerNotFoundError{up.String()}
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
func (up *urlPtr) format(tok string) string {
|
||||
return fmt.Sprintf("%s#%s/%s", up.url, encode(string(up.ptr)), encode(escape(tok)))
|
||||
}
|
||||
|
||||
func (up *urlPtr) String() string {
|
||||
return fmt.Sprintf("%s#%s", up.url, encode(string(up.ptr)))
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
func minInt(i, j int) int {
|
||||
if i < j {
|
||||
return i
|
||||
}
|
||||
return j
|
||||
}
|
||||
|
||||
func strVal(obj map[string]any, prop string) (string, bool) {
|
||||
v, ok := obj[prop]
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
s, ok := v.(string)
|
||||
return s, ok
|
||||
}
|
||||
|
||||
func isInteger(num any) bool {
|
||||
rat, ok := new(big.Rat).SetString(fmt.Sprint(num))
|
||||
return ok && rat.IsInt()
|
||||
}
|
||||
|
||||
// quote returns single-quoted string.
|
||||
// used for embedding quoted strings in json.
|
||||
func quote(s string) string {
|
||||
s = fmt.Sprintf("%q", s)
|
||||
s = strings.ReplaceAll(s, `\"`, `"`)
|
||||
s = strings.ReplaceAll(s, `'`, `\'`)
|
||||
return "'" + s[1:len(s)-1] + "'"
|
||||
}
|
||||
|
||||
func equals(v1, v2 any) (bool, ErrorKind) {
|
||||
switch v1 := v1.(type) {
|
||||
case map[string]any:
|
||||
v2, ok := v2.(map[string]any)
|
||||
if !ok || len(v1) != len(v2) {
|
||||
return false, nil
|
||||
}
|
||||
for k, val1 := range v1 {
|
||||
val2, ok := v2[k]
|
||||
if !ok {
|
||||
return false, nil
|
||||
}
|
||||
if ok, k := equals(val1, val2); !ok || k != nil {
|
||||
return ok, k
|
||||
}
|
||||
}
|
||||
return true, nil
|
||||
case []any:
|
||||
v2, ok := v2.([]any)
|
||||
if !ok || len(v1) != len(v2) {
|
||||
return false, nil
|
||||
}
|
||||
for i := range v1 {
|
||||
if ok, k := equals(v1[i], v2[i]); !ok || k != nil {
|
||||
return ok, k
|
||||
}
|
||||
}
|
||||
return true, nil
|
||||
case nil:
|
||||
return v2 == nil, nil
|
||||
case bool:
|
||||
v2, ok := v2.(bool)
|
||||
return ok && v1 == v2, nil
|
||||
case string:
|
||||
v2, ok := v2.(string)
|
||||
return ok && v1 == v2, nil
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
num1, ok1 := new(big.Rat).SetString(fmt.Sprint(v1))
|
||||
num2, ok2 := new(big.Rat).SetString(fmt.Sprint(v2))
|
||||
return ok1 && ok2 && num1.Cmp(num2) == 0, nil
|
||||
default:
|
||||
return false, &kind.InvalidJsonValue{Value: v1}
|
||||
}
|
||||
}
|
||||
|
||||
func duplicates(arr []any) (int, int, ErrorKind) {
|
||||
if len(arr) <= 20 {
|
||||
for i := 1; i < len(arr); i++ {
|
||||
for j := 0; j < i; j++ {
|
||||
if ok, k := equals(arr[i], arr[j]); ok || k != nil {
|
||||
return j, i, k
|
||||
}
|
||||
}
|
||||
}
|
||||
return -1, -1, nil
|
||||
}
|
||||
|
||||
m := make(map[uint64][]int)
|
||||
h := new(maphash.Hash)
|
||||
for i, item := range arr {
|
||||
h.Reset()
|
||||
writeHash(item, h)
|
||||
hash := h.Sum64()
|
||||
indexes, ok := m[hash]
|
||||
if ok {
|
||||
for _, j := range indexes {
|
||||
if ok, k := equals(item, arr[j]); ok || k != nil {
|
||||
return j, i, k
|
||||
}
|
||||
}
|
||||
}
|
||||
indexes = append(indexes, i)
|
||||
m[hash] = indexes
|
||||
}
|
||||
return -1, -1, nil
|
||||
}
|
||||
|
||||
func writeHash(v any, h *maphash.Hash) ErrorKind {
|
||||
switch v := v.(type) {
|
||||
case map[string]any:
|
||||
_ = h.WriteByte(0)
|
||||
props := make([]string, 0, len(v))
|
||||
for prop := range v {
|
||||
props = append(props, prop)
|
||||
}
|
||||
slices.Sort(props)
|
||||
for _, prop := range props {
|
||||
writeHash(prop, h)
|
||||
writeHash(v[prop], h)
|
||||
}
|
||||
case []any:
|
||||
_ = h.WriteByte(1)
|
||||
for _, item := range v {
|
||||
writeHash(item, h)
|
||||
}
|
||||
case nil:
|
||||
_ = h.WriteByte(2)
|
||||
case bool:
|
||||
_ = h.WriteByte(3)
|
||||
if v {
|
||||
_ = h.WriteByte(1)
|
||||
} else {
|
||||
_ = h.WriteByte(0)
|
||||
}
|
||||
case string:
|
||||
_ = h.WriteByte(4)
|
||||
_, _ = h.WriteString(v)
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
_ = h.WriteByte(5)
|
||||
num, _ := new(big.Rat).SetString(fmt.Sprint(v))
|
||||
_, _ = h.Write(num.Num().Bytes())
|
||||
_, _ = h.Write(num.Denom().Bytes())
|
||||
default:
|
||||
return &kind.InvalidJsonValue{Value: v}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ParseURLError struct {
|
||||
URL string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e *ParseURLError) Error() string {
|
||||
return fmt.Sprintf("error in parsing %q: %v", e.URL, e.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type InvalidJsonPointerError struct {
|
||||
URL string
|
||||
}
|
||||
|
||||
func (e *InvalidJsonPointerError) Error() string {
|
||||
return fmt.Sprintf("invalid json-pointer %q", e.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type JSONPointerNotFoundError struct {
|
||||
URL string
|
||||
}
|
||||
|
||||
func (e *JSONPointerNotFoundError) Error() string {
|
||||
return fmt.Sprintf("json-pointer in %q not found", e.URL)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type SchemaValidationError struct {
|
||||
URL string
|
||||
Err error
|
||||
}
|
||||
|
||||
func (e *SchemaValidationError) Error() string {
|
||||
return fmt.Sprintf("%q is not valid against metaschema: %v", e.URL, e.Err)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// LocalizableError is an error whose message is localizable.
|
||||
func LocalizableError(format string, args ...any) error {
|
||||
return &localizableError{format, args}
|
||||
}
|
||||
|
||||
type localizableError struct {
|
||||
msg string
|
||||
args []any
|
||||
}
|
||||
|
||||
func (e *localizableError) Error() string {
|
||||
return fmt.Sprintf(e.msg, e.args...)
|
||||
}
|
||||
|
||||
func (e *localizableError) LocalizedError(p *message.Printer) string {
|
||||
return p.Sprintf(e.msg, e.args...)
|
||||
}
|
||||
975
vendor/github.com/santhosh-tekuri/jsonschema/v6/validator.go
generated
vendored
Normal file
975
vendor/github.com/santhosh-tekuri/jsonschema/v6/validator.go
generated
vendored
Normal file
|
|
@ -0,0 +1,975 @@
|
|||
package jsonschema
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"slices"
|
||||
"strconv"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/santhosh-tekuri/jsonschema/v6/kind"
|
||||
"golang.org/x/text/message"
|
||||
)
|
||||
|
||||
func (sch *Schema) Validate(v any) error {
|
||||
return sch.validate(v, nil, nil, nil, false, nil)
|
||||
}
|
||||
|
||||
func (sch *Schema) validate(v any, regexpEngine RegexpEngine, meta *Schema, resources map[jsonPointer]*resource, assertVocabs bool, vocabularies map[string]*Vocabulary) error {
|
||||
vd := validator{
|
||||
v: v,
|
||||
vloc: make([]string, 0, 8),
|
||||
sch: sch,
|
||||
scp: &scope{sch, "", 0, nil},
|
||||
uneval: unevalFrom(v, sch, false),
|
||||
errors: nil,
|
||||
boolResult: false,
|
||||
regexpEngine: regexpEngine,
|
||||
meta: meta,
|
||||
resources: resources,
|
||||
assertVocabs: assertVocabs,
|
||||
vocabularies: vocabularies,
|
||||
}
|
||||
if _, err := vd.validate(); err != nil {
|
||||
verr := err.(*ValidationError)
|
||||
var causes []*ValidationError
|
||||
if _, ok := verr.ErrorKind.(*kind.Group); ok {
|
||||
causes = verr.Causes
|
||||
} else {
|
||||
causes = []*ValidationError{verr}
|
||||
}
|
||||
return &ValidationError{
|
||||
SchemaURL: sch.Location,
|
||||
InstanceLocation: nil,
|
||||
ErrorKind: &kind.Schema{Location: sch.Location},
|
||||
Causes: causes,
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type validator struct {
|
||||
v any
|
||||
vloc []string
|
||||
sch *Schema
|
||||
scp *scope
|
||||
uneval *uneval
|
||||
errors []*ValidationError
|
||||
boolResult bool // is interested to know valid or not (but not actuall error)
|
||||
regexpEngine RegexpEngine
|
||||
|
||||
// meta validation
|
||||
meta *Schema // set only when validating with metaschema
|
||||
resources map[jsonPointer]*resource // resources which should be validated with their dialect
|
||||
assertVocabs bool
|
||||
vocabularies map[string]*Vocabulary
|
||||
}
|
||||
|
||||
func (vd *validator) validate() (*uneval, error) {
|
||||
s := vd.sch
|
||||
v := vd.v
|
||||
|
||||
// boolean --
|
||||
if s.Bool != nil {
|
||||
if *s.Bool {
|
||||
return vd.uneval, nil
|
||||
} else {
|
||||
return nil, vd.error(&kind.FalseSchema{})
|
||||
}
|
||||
}
|
||||
|
||||
// check cycle --
|
||||
if scp := vd.scp.checkCycle(); scp != nil {
|
||||
return nil, vd.error(&kind.RefCycle{
|
||||
URL: s.Location,
|
||||
KeywordLocation1: vd.scp.kwLoc(),
|
||||
KeywordLocation2: scp.kwLoc(),
|
||||
})
|
||||
}
|
||||
|
||||
t := typeOf(v)
|
||||
if t == invalidType {
|
||||
return nil, vd.error(&kind.InvalidJsonValue{Value: v})
|
||||
}
|
||||
|
||||
// type --
|
||||
if s.Types != nil && !s.Types.IsEmpty() {
|
||||
matched := s.Types.contains(t) || (s.Types.contains(integerType) && t == numberType && isInteger(v))
|
||||
if !matched {
|
||||
return nil, vd.error(&kind.Type{Got: t.String(), Want: s.Types.ToStrings()})
|
||||
}
|
||||
}
|
||||
|
||||
// const --
|
||||
if s.Const != nil {
|
||||
ok, k := equals(v, *s.Const)
|
||||
if k != nil {
|
||||
return nil, vd.error(k)
|
||||
} else if !ok {
|
||||
return nil, vd.error(&kind.Const{Got: v, Want: *s.Const})
|
||||
}
|
||||
}
|
||||
|
||||
// enum --
|
||||
if s.Enum != nil {
|
||||
matched := s.Enum.types.contains(typeOf(v))
|
||||
if matched {
|
||||
matched = false
|
||||
for _, item := range s.Enum.Values {
|
||||
ok, k := equals(v, item)
|
||||
if k != nil {
|
||||
return nil, vd.error(k)
|
||||
} else if ok {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
return nil, vd.error(&kind.Enum{Got: v, Want: s.Enum.Values})
|
||||
}
|
||||
}
|
||||
|
||||
// format --
|
||||
if s.Format != nil {
|
||||
var err error
|
||||
if s.Format.Name == "regex" && vd.regexpEngine != nil {
|
||||
err = vd.regexpEngine.validate(v)
|
||||
} else {
|
||||
err = s.Format.Validate(v)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, vd.error(&kind.Format{Got: v, Want: s.Format.Name, Err: err})
|
||||
}
|
||||
}
|
||||
|
||||
// $ref --
|
||||
if s.Ref != nil {
|
||||
err := vd.validateRef(s.Ref, "$ref")
|
||||
if s.DraftVersion < 2019 {
|
||||
return vd.uneval, err
|
||||
}
|
||||
if err != nil {
|
||||
vd.addErr(err)
|
||||
}
|
||||
}
|
||||
|
||||
// type specific validations --
|
||||
switch v := v.(type) {
|
||||
case map[string]any:
|
||||
vd.objValidate(v)
|
||||
case []any:
|
||||
vd.arrValidate(v)
|
||||
case string:
|
||||
vd.strValidate(v)
|
||||
case json.Number, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
|
||||
vd.numValidate(v)
|
||||
}
|
||||
|
||||
if len(vd.errors) == 0 || !vd.boolResult {
|
||||
if s.DraftVersion >= 2019 {
|
||||
vd.validateRefs()
|
||||
}
|
||||
vd.condValidate()
|
||||
|
||||
for _, ext := range s.Extensions {
|
||||
ext.Validate(&ValidatorContext{vd}, v)
|
||||
}
|
||||
|
||||
if s.DraftVersion >= 2019 {
|
||||
vd.unevalValidate()
|
||||
}
|
||||
}
|
||||
|
||||
switch len(vd.errors) {
|
||||
case 0:
|
||||
return vd.uneval, nil
|
||||
case 1:
|
||||
return nil, vd.errors[0]
|
||||
default:
|
||||
verr := vd.error(&kind.Group{})
|
||||
verr.Causes = vd.errors
|
||||
return nil, verr
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) objValidate(obj map[string]any) {
|
||||
s := vd.sch
|
||||
|
||||
// minProperties --
|
||||
if s.MinProperties != nil {
|
||||
if len(obj) < *s.MinProperties {
|
||||
vd.addError(&kind.MinProperties{Got: len(obj), Want: *s.MinProperties})
|
||||
}
|
||||
}
|
||||
|
||||
// maxProperties --
|
||||
if s.MaxProperties != nil {
|
||||
if len(obj) > *s.MaxProperties {
|
||||
vd.addError(&kind.MaxProperties{Got: len(obj), Want: *s.MaxProperties})
|
||||
}
|
||||
}
|
||||
|
||||
// required --
|
||||
if len(s.Required) > 0 {
|
||||
if missing := vd.findMissing(obj, s.Required); missing != nil {
|
||||
vd.addError(&kind.Required{Missing: missing})
|
||||
}
|
||||
}
|
||||
|
||||
if vd.boolResult && len(vd.errors) > 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// dependencies --
|
||||
for pname, dep := range s.Dependencies {
|
||||
if _, ok := obj[pname]; ok {
|
||||
switch dep := dep.(type) {
|
||||
case []string:
|
||||
if missing := vd.findMissing(obj, dep); missing != nil {
|
||||
vd.addError(&kind.Dependency{Prop: pname, Missing: missing})
|
||||
}
|
||||
case *Schema:
|
||||
vd.addErr(vd.validateSelf(dep, "", false))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var additionalPros []string
|
||||
for pname, pvalue := range obj {
|
||||
if vd.boolResult && len(vd.errors) > 0 {
|
||||
return
|
||||
}
|
||||
evaluated := false
|
||||
|
||||
// properties --
|
||||
if sch, ok := s.Properties[pname]; ok {
|
||||
evaluated = true
|
||||
vd.addErr(vd.validateVal(sch, pvalue, pname))
|
||||
}
|
||||
|
||||
// patternProperties --
|
||||
for regex, sch := range s.PatternProperties {
|
||||
if regex.MatchString(pname) {
|
||||
evaluated = true
|
||||
vd.addErr(vd.validateVal(sch, pvalue, pname))
|
||||
}
|
||||
}
|
||||
|
||||
if !evaluated && s.AdditionalProperties != nil {
|
||||
evaluated = true
|
||||
switch additional := s.AdditionalProperties.(type) {
|
||||
case bool:
|
||||
if !additional {
|
||||
additionalPros = append(additionalPros, pname)
|
||||
}
|
||||
case *Schema:
|
||||
vd.addErr(vd.validateVal(additional, pvalue, pname))
|
||||
}
|
||||
}
|
||||
|
||||
if evaluated {
|
||||
delete(vd.uneval.props, pname)
|
||||
}
|
||||
}
|
||||
if len(additionalPros) > 0 {
|
||||
vd.addError(&kind.AdditionalProperties{Properties: additionalPros})
|
||||
}
|
||||
|
||||
if s.DraftVersion == 4 {
|
||||
return
|
||||
}
|
||||
|
||||
// propertyNames --
|
||||
if s.PropertyNames != nil {
|
||||
for pname := range obj {
|
||||
sch, meta, resources := s.PropertyNames, vd.meta, vd.resources
|
||||
res := vd.metaResource(sch)
|
||||
if res != nil {
|
||||
meta = res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
|
||||
sch = meta
|
||||
}
|
||||
if err := sch.validate(pname, vd.regexpEngine, meta, resources, vd.assertVocabs, vd.vocabularies); err != nil {
|
||||
verr := err.(*ValidationError)
|
||||
verr.SchemaURL = s.PropertyNames.Location
|
||||
verr.ErrorKind = &kind.PropertyNames{Property: pname}
|
||||
vd.addErr(verr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if s.DraftVersion == 6 {
|
||||
return
|
||||
}
|
||||
|
||||
// dependentSchemas --
|
||||
for pname, sch := range s.DependentSchemas {
|
||||
if _, ok := obj[pname]; ok {
|
||||
vd.addErr(vd.validateSelf(sch, "", false))
|
||||
}
|
||||
}
|
||||
|
||||
// dependentRequired --
|
||||
for pname, reqd := range s.DependentRequired {
|
||||
if _, ok := obj[pname]; ok {
|
||||
if missing := vd.findMissing(obj, reqd); missing != nil {
|
||||
vd.addError(&kind.DependentRequired{Prop: pname, Missing: missing})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) arrValidate(arr []any) {
|
||||
s := vd.sch
|
||||
|
||||
// minItems --
|
||||
if s.MinItems != nil {
|
||||
if len(arr) < *s.MinItems {
|
||||
vd.addError(&kind.MinItems{Got: len(arr), Want: *s.MinItems})
|
||||
}
|
||||
}
|
||||
|
||||
// maxItems --
|
||||
if s.MaxItems != nil {
|
||||
if len(arr) > *s.MaxItems {
|
||||
vd.addError(&kind.MaxItems{Got: len(arr), Want: *s.MaxItems})
|
||||
}
|
||||
}
|
||||
|
||||
// uniqueItems --
|
||||
if s.UniqueItems && len(arr) > 1 {
|
||||
i, j, k := duplicates(arr)
|
||||
if k != nil {
|
||||
vd.addError(k)
|
||||
} else if i != -1 {
|
||||
vd.addError(&kind.UniqueItems{Duplicates: [2]int{i, j}})
|
||||
}
|
||||
}
|
||||
|
||||
if s.DraftVersion < 2020 {
|
||||
evaluated := 0
|
||||
|
||||
// items --
|
||||
switch items := s.Items.(type) {
|
||||
case *Schema:
|
||||
for i, item := range arr {
|
||||
vd.addErr(vd.validateVal(items, item, strconv.Itoa(i)))
|
||||
}
|
||||
evaluated = len(arr)
|
||||
case []*Schema:
|
||||
min := minInt(len(arr), len(items))
|
||||
for i, item := range arr[:min] {
|
||||
vd.addErr(vd.validateVal(items[i], item, strconv.Itoa(i)))
|
||||
}
|
||||
evaluated = min
|
||||
}
|
||||
|
||||
// additionalItems --
|
||||
if s.AdditionalItems != nil {
|
||||
switch additional := s.AdditionalItems.(type) {
|
||||
case bool:
|
||||
if !additional && evaluated != len(arr) {
|
||||
vd.addError(&kind.AdditionalItems{Count: len(arr) - evaluated})
|
||||
}
|
||||
case *Schema:
|
||||
for i, item := range arr[evaluated:] {
|
||||
vd.addErr(vd.validateVal(additional, item, strconv.Itoa(i)))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
evaluated := minInt(len(s.PrefixItems), len(arr))
|
||||
|
||||
// prefixItems --
|
||||
for i, item := range arr[:evaluated] {
|
||||
vd.addErr(vd.validateVal(s.PrefixItems[i], item, strconv.Itoa(i)))
|
||||
}
|
||||
|
||||
// items2020 --
|
||||
if s.Items2020 != nil {
|
||||
for i, item := range arr[evaluated:] {
|
||||
vd.addErr(vd.validateVal(s.Items2020, item, strconv.Itoa(i)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// contains --
|
||||
if s.Contains != nil {
|
||||
var errors []*ValidationError
|
||||
var matched []int
|
||||
|
||||
for i, item := range arr {
|
||||
if err := vd.validateVal(s.Contains, item, strconv.Itoa(i)); err != nil {
|
||||
errors = append(errors, err.(*ValidationError))
|
||||
} else {
|
||||
matched = append(matched, i)
|
||||
if s.DraftVersion >= 2020 {
|
||||
delete(vd.uneval.items, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// minContains --
|
||||
if s.MinContains != nil {
|
||||
if len(matched) < *s.MinContains {
|
||||
vd.addErrors(errors, &kind.MinContains{Got: matched, Want: *s.MinContains})
|
||||
}
|
||||
} else if len(matched) == 0 {
|
||||
vd.addErrors(errors, &kind.Contains{})
|
||||
}
|
||||
|
||||
// maxContains --
|
||||
if s.MaxContains != nil {
|
||||
if len(matched) > *s.MaxContains {
|
||||
vd.addError(&kind.MaxContains{Got: matched, Want: *s.MaxContains})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) strValidate(str string) {
|
||||
s := vd.sch
|
||||
|
||||
strLen := -1
|
||||
if s.MinLength != nil || s.MaxLength != nil {
|
||||
strLen = utf8.RuneCount([]byte(str))
|
||||
}
|
||||
|
||||
// minLength --
|
||||
if s.MinLength != nil {
|
||||
if strLen < *s.MinLength {
|
||||
vd.addError(&kind.MinLength{Got: strLen, Want: *s.MinLength})
|
||||
}
|
||||
}
|
||||
|
||||
// maxLength --
|
||||
if s.MaxLength != nil {
|
||||
if strLen > *s.MaxLength {
|
||||
vd.addError(&kind.MaxLength{Got: strLen, Want: *s.MaxLength})
|
||||
}
|
||||
}
|
||||
|
||||
// pattern --
|
||||
if s.Pattern != nil {
|
||||
if !s.Pattern.MatchString(str) {
|
||||
vd.addError(&kind.Pattern{Got: str, Want: s.Pattern.String()})
|
||||
}
|
||||
}
|
||||
|
||||
if s.DraftVersion == 6 {
|
||||
return
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
// contentEncoding --
|
||||
decoded := []byte(str)
|
||||
if s.ContentEncoding != nil {
|
||||
decoded, err = s.ContentEncoding.Decode(str)
|
||||
if err != nil {
|
||||
decoded = nil
|
||||
vd.addError(&kind.ContentEncoding{Want: s.ContentEncoding.Name, Err: err})
|
||||
}
|
||||
}
|
||||
|
||||
var deserialized *any
|
||||
if decoded != nil && s.ContentMediaType != nil {
|
||||
if s.ContentSchema == nil {
|
||||
err = s.ContentMediaType.Validate(decoded)
|
||||
} else {
|
||||
var value any
|
||||
value, err = s.ContentMediaType.UnmarshalJSON(decoded)
|
||||
if err == nil {
|
||||
deserialized = &value
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
vd.addError(&kind.ContentMediaType{
|
||||
Got: decoded,
|
||||
Want: s.ContentMediaType.Name,
|
||||
Err: err,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if deserialized != nil && s.ContentSchema != nil {
|
||||
sch, meta, resources := s.ContentSchema, vd.meta, vd.resources
|
||||
res := vd.metaResource(sch)
|
||||
if res != nil {
|
||||
meta = res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
|
||||
sch = meta
|
||||
}
|
||||
if err = sch.validate(*deserialized, vd.regexpEngine, meta, resources, vd.assertVocabs, vd.vocabularies); err != nil {
|
||||
verr := err.(*ValidationError)
|
||||
verr.SchemaURL = s.Location
|
||||
verr.ErrorKind = &kind.ContentSchema{}
|
||||
vd.addErr(verr)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) numValidate(v any) {
|
||||
s := vd.sch
|
||||
|
||||
var numVal *big.Rat
|
||||
num := func() *big.Rat {
|
||||
if numVal == nil {
|
||||
numVal, _ = new(big.Rat).SetString(fmt.Sprintf("%v", v))
|
||||
}
|
||||
return numVal
|
||||
}
|
||||
|
||||
// minimum --
|
||||
if s.Minimum != nil && num().Cmp(s.Minimum) < 0 {
|
||||
vd.addError(&kind.Minimum{Got: num(), Want: s.Minimum})
|
||||
}
|
||||
|
||||
// maximum --
|
||||
if s.Maximum != nil && num().Cmp(s.Maximum) > 0 {
|
||||
vd.addError(&kind.Maximum{Got: num(), Want: s.Maximum})
|
||||
}
|
||||
|
||||
// exclusiveMinimum
|
||||
if s.ExclusiveMinimum != nil && num().Cmp(s.ExclusiveMinimum) <= 0 {
|
||||
vd.addError(&kind.ExclusiveMinimum{Got: num(), Want: s.ExclusiveMinimum})
|
||||
}
|
||||
|
||||
// exclusiveMaximum
|
||||
if s.ExclusiveMaximum != nil && num().Cmp(s.ExclusiveMaximum) >= 0 {
|
||||
vd.addError(&kind.ExclusiveMaximum{Got: num(), Want: s.ExclusiveMaximum})
|
||||
}
|
||||
|
||||
// multipleOf
|
||||
if s.MultipleOf != nil {
|
||||
if q := new(big.Rat).Quo(num(), s.MultipleOf); !q.IsInt() {
|
||||
vd.addError(&kind.MultipleOf{Got: num(), Want: s.MultipleOf})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) condValidate() {
|
||||
s := vd.sch
|
||||
|
||||
// not --
|
||||
if s.Not != nil {
|
||||
if vd.validateSelf(s.Not, "", true) == nil {
|
||||
vd.addError(&kind.Not{})
|
||||
}
|
||||
}
|
||||
|
||||
// allOf --
|
||||
if len(s.AllOf) > 0 {
|
||||
var errors []*ValidationError
|
||||
for _, sch := range s.AllOf {
|
||||
if err := vd.validateSelf(sch, "", false); err != nil {
|
||||
errors = append(errors, err.(*ValidationError))
|
||||
if vd.boolResult {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if len(errors) != 0 {
|
||||
vd.addErrors(errors, &kind.AllOf{})
|
||||
}
|
||||
}
|
||||
|
||||
// anyOf
|
||||
if len(s.AnyOf) > 0 {
|
||||
var matched bool
|
||||
var errors []*ValidationError
|
||||
for _, sch := range s.AnyOf {
|
||||
if err := vd.validateSelf(sch, "", false); err != nil {
|
||||
errors = append(errors, err.(*ValidationError))
|
||||
} else {
|
||||
matched = true
|
||||
// for uneval, all schemas must be evaluated
|
||||
if vd.uneval.isEmpty() {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
vd.addErrors(errors, &kind.AnyOf{})
|
||||
}
|
||||
}
|
||||
|
||||
// oneOf
|
||||
if len(s.OneOf) > 0 {
|
||||
var matched = -1
|
||||
var errors []*ValidationError
|
||||
for i, sch := range s.OneOf {
|
||||
if err := vd.validateSelf(sch, "", matched != -1); err != nil {
|
||||
if matched == -1 {
|
||||
errors = append(errors, err.(*ValidationError))
|
||||
}
|
||||
} else {
|
||||
if matched == -1 {
|
||||
matched = i
|
||||
} else {
|
||||
vd.addError(&kind.OneOf{Subschemas: []int{matched, i}})
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if matched == -1 {
|
||||
vd.addErrors(errors, &kind.OneOf{Subschemas: nil})
|
||||
}
|
||||
}
|
||||
|
||||
// if, then, else --
|
||||
if s.If != nil {
|
||||
if vd.validateSelf(s.If, "", true) == nil {
|
||||
if s.Then != nil {
|
||||
vd.addErr(vd.validateSelf(s.Then, "", false))
|
||||
}
|
||||
} else if s.Else != nil {
|
||||
vd.addErr(vd.validateSelf(s.Else, "", false))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) unevalValidate() {
|
||||
s := vd.sch
|
||||
|
||||
// unevaluatedProperties
|
||||
if obj, ok := vd.v.(map[string]any); ok && s.UnevaluatedProperties != nil {
|
||||
for pname := range vd.uneval.props {
|
||||
if pvalue, ok := obj[pname]; ok {
|
||||
vd.addErr(vd.validateVal(s.UnevaluatedProperties, pvalue, pname))
|
||||
}
|
||||
}
|
||||
vd.uneval.props = nil
|
||||
}
|
||||
|
||||
// unevaluatedItems
|
||||
if arr, ok := vd.v.([]any); ok && s.UnevaluatedItems != nil {
|
||||
for i := range vd.uneval.items {
|
||||
vd.addErr(vd.validateVal(s.UnevaluatedItems, arr[i], strconv.Itoa(i)))
|
||||
}
|
||||
vd.uneval.items = nil
|
||||
}
|
||||
}
|
||||
|
||||
// validation helpers --
|
||||
|
||||
func (vd *validator) validateSelf(sch *Schema, refKw string, boolResult bool) error {
|
||||
scp := vd.scp.child(sch, refKw, vd.scp.vid)
|
||||
uneval := unevalFrom(vd.v, sch, !vd.uneval.isEmpty())
|
||||
subvd := validator{
|
||||
v: vd.v,
|
||||
vloc: vd.vloc,
|
||||
sch: sch,
|
||||
scp: scp,
|
||||
uneval: uneval,
|
||||
errors: nil,
|
||||
boolResult: vd.boolResult || boolResult,
|
||||
regexpEngine: vd.regexpEngine,
|
||||
meta: vd.meta,
|
||||
resources: vd.resources,
|
||||
assertVocabs: vd.assertVocabs,
|
||||
vocabularies: vd.vocabularies,
|
||||
}
|
||||
subvd.handleMeta()
|
||||
uneval, err := subvd.validate()
|
||||
if err == nil {
|
||||
vd.uneval.merge(uneval)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (vd *validator) validateVal(sch *Schema, v any, vtok string) error {
|
||||
vloc := append(vd.vloc, vtok)
|
||||
scp := vd.scp.child(sch, "", vd.scp.vid+1)
|
||||
uneval := unevalFrom(v, sch, false)
|
||||
subvd := validator{
|
||||
v: v,
|
||||
vloc: vloc,
|
||||
sch: sch,
|
||||
scp: scp,
|
||||
uneval: uneval,
|
||||
errors: nil,
|
||||
boolResult: vd.boolResult,
|
||||
regexpEngine: vd.regexpEngine,
|
||||
meta: vd.meta,
|
||||
resources: vd.resources,
|
||||
assertVocabs: vd.assertVocabs,
|
||||
vocabularies: vd.vocabularies,
|
||||
}
|
||||
subvd.handleMeta()
|
||||
_, err := subvd.validate()
|
||||
return err
|
||||
}
|
||||
|
||||
func (vd *validator) validateValue(sch *Schema, v any, vpath []string) error {
|
||||
vloc := append(vd.vloc, vpath...)
|
||||
scp := vd.scp.child(sch, "", vd.scp.vid+1)
|
||||
uneval := unevalFrom(v, sch, false)
|
||||
subvd := validator{
|
||||
v: v,
|
||||
vloc: vloc,
|
||||
sch: sch,
|
||||
scp: scp,
|
||||
uneval: uneval,
|
||||
errors: nil,
|
||||
boolResult: vd.boolResult,
|
||||
regexpEngine: vd.regexpEngine,
|
||||
meta: vd.meta,
|
||||
resources: vd.resources,
|
||||
assertVocabs: vd.assertVocabs,
|
||||
vocabularies: vd.vocabularies,
|
||||
}
|
||||
subvd.handleMeta()
|
||||
_, err := subvd.validate()
|
||||
return err
|
||||
}
|
||||
|
||||
func (vd *validator) metaResource(sch *Schema) *resource {
|
||||
if sch != vd.meta {
|
||||
return nil
|
||||
}
|
||||
ptr := ""
|
||||
for _, tok := range vd.instanceLocation() {
|
||||
ptr += "/"
|
||||
ptr += escape(tok)
|
||||
}
|
||||
return vd.resources[jsonPointer(ptr)]
|
||||
}
|
||||
|
||||
func (vd *validator) handleMeta() {
|
||||
res := vd.metaResource(vd.sch)
|
||||
if res == nil {
|
||||
return
|
||||
}
|
||||
sch := res.dialect.getSchema(vd.assertVocabs, vd.vocabularies)
|
||||
vd.meta = sch
|
||||
vd.sch = sch
|
||||
}
|
||||
|
||||
// reference validation --
|
||||
|
||||
func (vd *validator) validateRef(sch *Schema, kw string) error {
|
||||
err := vd.validateSelf(sch, kw, false)
|
||||
if err != nil {
|
||||
refErr := vd.error(&kind.Reference{Keyword: kw, URL: sch.Location})
|
||||
verr := err.(*ValidationError)
|
||||
if _, ok := verr.ErrorKind.(*kind.Group); ok {
|
||||
refErr.Causes = verr.Causes
|
||||
} else {
|
||||
refErr.Causes = append(refErr.Causes, verr)
|
||||
}
|
||||
return refErr
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (vd *validator) resolveRecursiveAnchor(fallback *Schema) *Schema {
|
||||
sch := fallback
|
||||
scp := vd.scp
|
||||
for scp != nil {
|
||||
if scp.sch.resource.RecursiveAnchor {
|
||||
sch = scp.sch
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (vd *validator) resolveDynamicAnchor(name string, fallback *Schema) *Schema {
|
||||
sch := fallback
|
||||
scp := vd.scp
|
||||
for scp != nil {
|
||||
if dsch, ok := scp.sch.resource.dynamicAnchors[name]; ok {
|
||||
sch = dsch
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return sch
|
||||
}
|
||||
|
||||
func (vd *validator) validateRefs() {
|
||||
// $recursiveRef --
|
||||
if sch := vd.sch.RecursiveRef; sch != nil {
|
||||
if sch.RecursiveAnchor {
|
||||
sch = vd.resolveRecursiveAnchor(sch)
|
||||
}
|
||||
vd.addErr(vd.validateRef(sch, "$recursiveRef"))
|
||||
}
|
||||
|
||||
// $dynamicRef --
|
||||
if dref := vd.sch.DynamicRef; dref != nil {
|
||||
sch := dref.Ref // initial target
|
||||
if dref.Anchor != "" {
|
||||
// $dynamicRef includes anchor
|
||||
if sch.DynamicAnchor == dref.Anchor {
|
||||
// initial target has matching $dynamicAnchor
|
||||
sch = vd.resolveDynamicAnchor(dref.Anchor, sch)
|
||||
}
|
||||
}
|
||||
vd.addErr(vd.validateRef(sch, "$dynamicRef"))
|
||||
}
|
||||
}
|
||||
|
||||
// error helpers --
|
||||
|
||||
func (vd *validator) instanceLocation() []string {
|
||||
return slices.Clone(vd.vloc)
|
||||
}
|
||||
|
||||
func (vd *validator) error(kind ErrorKind) *ValidationError {
|
||||
if vd.boolResult {
|
||||
return &ValidationError{}
|
||||
}
|
||||
return &ValidationError{
|
||||
SchemaURL: vd.sch.Location,
|
||||
InstanceLocation: vd.instanceLocation(),
|
||||
ErrorKind: kind,
|
||||
Causes: nil,
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) addErr(err error) {
|
||||
if err != nil {
|
||||
vd.errors = append(vd.errors, err.(*ValidationError))
|
||||
}
|
||||
}
|
||||
|
||||
func (vd *validator) addError(kind ErrorKind) {
|
||||
vd.errors = append(vd.errors, vd.error(kind))
|
||||
}
|
||||
|
||||
func (vd *validator) addErrors(errors []*ValidationError, kind ErrorKind) {
|
||||
err := vd.error(kind)
|
||||
err.Causes = errors
|
||||
vd.errors = append(vd.errors, err)
|
||||
}
|
||||
|
||||
func (vd *validator) findMissing(obj map[string]any, reqd []string) []string {
|
||||
var missing []string
|
||||
for _, pname := range reqd {
|
||||
if _, ok := obj[pname]; !ok {
|
||||
if vd.boolResult {
|
||||
return []string{} // non-nil
|
||||
}
|
||||
missing = append(missing, pname)
|
||||
}
|
||||
}
|
||||
return missing
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type scope struct {
|
||||
sch *Schema
|
||||
|
||||
// if empty, compute from self.sch and self.parent.sch.
|
||||
// not empty, only when there is a jump i.e, $ref, $XXXRef
|
||||
refKeyword string
|
||||
|
||||
// unique id of value being validated
|
||||
// if two scopes validate same value, they will have
|
||||
// same vid
|
||||
vid int
|
||||
|
||||
parent *scope
|
||||
}
|
||||
|
||||
func (sc *scope) child(sch *Schema, refKeyword string, vid int) *scope {
|
||||
return &scope{sch, refKeyword, vid, sc}
|
||||
}
|
||||
|
||||
func (sc *scope) checkCycle() *scope {
|
||||
scp := sc.parent
|
||||
for scp != nil {
|
||||
if scp.vid != sc.vid {
|
||||
break
|
||||
}
|
||||
if scp.sch == sc.sch {
|
||||
return scp
|
||||
}
|
||||
scp = scp.parent
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sc *scope) kwLoc() string {
|
||||
var loc string
|
||||
for sc.parent != nil {
|
||||
if sc.refKeyword != "" {
|
||||
loc = fmt.Sprintf("/%s%s", escape(sc.refKeyword), loc)
|
||||
} else {
|
||||
cur := sc.sch.Location
|
||||
parent := sc.parent.sch.Location
|
||||
loc = fmt.Sprintf("%s%s", cur[len(parent):], loc)
|
||||
}
|
||||
sc = sc.parent
|
||||
}
|
||||
return loc
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type uneval struct {
|
||||
props map[string]struct{}
|
||||
items map[int]struct{}
|
||||
}
|
||||
|
||||
func unevalFrom(v any, sch *Schema, callerNeeds bool) *uneval {
|
||||
uneval := &uneval{}
|
||||
switch v := v.(type) {
|
||||
case map[string]any:
|
||||
if !sch.allPropsEvaluated && (callerNeeds || sch.UnevaluatedProperties != nil) {
|
||||
uneval.props = map[string]struct{}{}
|
||||
for k := range v {
|
||||
uneval.props[k] = struct{}{}
|
||||
}
|
||||
}
|
||||
case []any:
|
||||
if !sch.allItemsEvaluated && (callerNeeds || sch.UnevaluatedItems != nil) && sch.numItemsEvaluated < len(v) {
|
||||
uneval.items = map[int]struct{}{}
|
||||
for i := sch.numItemsEvaluated; i < len(v); i++ {
|
||||
uneval.items[i] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
return uneval
|
||||
}
|
||||
|
||||
func (ue *uneval) merge(other *uneval) {
|
||||
for k := range ue.props {
|
||||
if _, ok := other.props[k]; !ok {
|
||||
delete(ue.props, k)
|
||||
}
|
||||
}
|
||||
for i := range ue.items {
|
||||
if _, ok := other.items[i]; !ok {
|
||||
delete(ue.items, i)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (ue *uneval) isEmpty() bool {
|
||||
return len(ue.props) == 0 && len(ue.items) == 0
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
type ValidationError struct {
|
||||
// absolute, dereferenced schema location.
|
||||
SchemaURL string
|
||||
|
||||
// location of the JSON value within the instance being validated.
|
||||
InstanceLocation []string
|
||||
|
||||
// kind of error
|
||||
ErrorKind ErrorKind
|
||||
|
||||
// holds nested errors
|
||||
Causes []*ValidationError
|
||||
}
|
||||
|
||||
type ErrorKind interface {
|
||||
KeywordPath() []string
|
||||
LocalizedString(*message.Printer) string
|
||||
}
|
||||
106
vendor/github.com/santhosh-tekuri/jsonschema/v6/vocab.go
generated
vendored
Normal file
106
vendor/github.com/santhosh-tekuri/jsonschema/v6/vocab.go
generated
vendored
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
package jsonschema
|
||||
|
||||
// CompilerContext provides helpers for
|
||||
// compiling a [Vocabulary].
|
||||
type CompilerContext struct {
|
||||
c *objCompiler
|
||||
}
|
||||
|
||||
func (ctx *CompilerContext) Enqueue(schPath []string) *Schema {
|
||||
ptr := ctx.c.up.ptr
|
||||
for _, tok := range schPath {
|
||||
ptr = ptr.append(tok)
|
||||
}
|
||||
return ctx.c.enqueuePtr(ptr)
|
||||
}
|
||||
|
||||
// Vocabulary defines a set of keywords, their syntax and
|
||||
// their semantics.
|
||||
type Vocabulary struct {
|
||||
// URL identifier for this Vocabulary.
|
||||
URL string
|
||||
|
||||
// Schema that is used to validate the keywords that is introduced by this
|
||||
// vocabulary.
|
||||
Schema *Schema
|
||||
|
||||
// Subschemas lists the possible locations of subschemas introduced by
|
||||
// this vocabulary.
|
||||
Subschemas []SchemaPath
|
||||
|
||||
// Compile compiles the keywords(introduced by this vocabulary) in obj into [SchemaExt].
|
||||
// If obj does not contain any keywords introduced by this vocabulary, nil SchemaExt must
|
||||
// be returned.
|
||||
Compile func(ctx *CompilerContext, obj map[string]any) (SchemaExt, error)
|
||||
}
|
||||
|
||||
// --
|
||||
|
||||
// SchemaExt is compled form of vocabulary.
|
||||
type SchemaExt interface {
|
||||
// Validate validates v against and errors if any are reported
|
||||
// to ctx.
|
||||
Validate(ctx *ValidatorContext, v any)
|
||||
}
|
||||
|
||||
// ValidatorContext provides helpers for
|
||||
// validating with [SchemaExt].
|
||||
type ValidatorContext struct {
|
||||
vd *validator
|
||||
}
|
||||
|
||||
// Validate validates v with sch. vpath gives path of v from current context value.
|
||||
func (ctx *ValidatorContext) Validate(sch *Schema, v any, vpath []string) error {
|
||||
switch len(vpath) {
|
||||
case 0:
|
||||
return ctx.vd.validateSelf(sch, "", false)
|
||||
case 1:
|
||||
return ctx.vd.validateVal(sch, v, vpath[0])
|
||||
default:
|
||||
return ctx.vd.validateValue(sch, v, vpath)
|
||||
}
|
||||
}
|
||||
|
||||
// EvaluatedProp marks given property of current object as evaluated.
|
||||
func (ctx *ValidatorContext) EvaluatedProp(pname string) {
|
||||
delete(ctx.vd.uneval.props, pname)
|
||||
}
|
||||
|
||||
// EvaluatedItem marks items at given index of current array as evaluated.
|
||||
func (ctx *ValidatorContext) EvaluatedItem(index int) {
|
||||
delete(ctx.vd.uneval.items, index)
|
||||
}
|
||||
|
||||
// AddError reports validation-error of given kind.
|
||||
func (ctx *ValidatorContext) AddError(k ErrorKind) {
|
||||
ctx.vd.addError(k)
|
||||
}
|
||||
|
||||
// AddErrors reports validation-errors of given kind.
|
||||
func (ctx *ValidatorContext) AddErrors(errors []*ValidationError, k ErrorKind) {
|
||||
ctx.vd.addErrors(errors, k)
|
||||
}
|
||||
|
||||
// AddErr reports the given err. This is typically used to report
|
||||
// the error created by subschema validation.
|
||||
//
|
||||
// NOTE that err must be of type *ValidationError.
|
||||
func (ctx *ValidatorContext) AddErr(err error) {
|
||||
ctx.vd.addErr(err)
|
||||
}
|
||||
|
||||
func (ctx *ValidatorContext) Equals(v1, v2 any) (bool, error) {
|
||||
b, k := equals(v1, v2)
|
||||
if k != nil {
|
||||
return false, ctx.vd.error(k)
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
|
||||
func (ctx *ValidatorContext) Duplicates(arr []any) (int, int, error) {
|
||||
i, j, k := duplicates(arr)
|
||||
if k != nil {
|
||||
return -1, -1, ctx.vd.error(k)
|
||||
}
|
||||
return i, j, nil
|
||||
}
|
||||
27
vendor/golang.org/x/text/LICENSE
generated
vendored
Normal file
27
vendor/golang.org/x/text/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
Copyright 2009 The Go Authors.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google LLC nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
22
vendor/golang.org/x/text/PATENTS
generated
vendored
Normal file
22
vendor/golang.org/x/text/PATENTS
generated
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
Additional IP Rights Grant (Patents)
|
||||
|
||||
"This implementation" means the copyrightable works distributed by
|
||||
Google as part of the Go project.
|
||||
|
||||
Google hereby grants to You a perpetual, worldwide, non-exclusive,
|
||||
no-charge, royalty-free, irrevocable (except as stated in this section)
|
||||
patent license to make, have made, use, offer to sell, sell, import,
|
||||
transfer and otherwise run, modify and propagate the contents of this
|
||||
implementation of Go, where such license applies only to those patent
|
||||
claims, both currently owned or controlled by Google and acquired in
|
||||
the future, licensable by Google that are necessarily infringed by this
|
||||
implementation of Go. This grant does not include claims that would be
|
||||
infringed only as a consequence of further modification of this
|
||||
implementation. If you or your agent or exclusive licensee institute or
|
||||
order or agree to the institution of patent litigation against any
|
||||
entity (including a cross-claim or counterclaim in a lawsuit) alleging
|
||||
that this implementation of Go or any code incorporated within this
|
||||
implementation of Go constitutes direct or contributory patent
|
||||
infringement, or inducement of patent infringement, then any patent
|
||||
rights granted to you under this License for this implementation of Go
|
||||
shall terminate as of the date such litigation is filed.
|
||||
70
vendor/golang.org/x/text/feature/plural/common.go
generated
vendored
Normal file
70
vendor/golang.org/x/text/feature/plural/common.go
generated
vendored
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
|
||||
|
||||
package plural
|
||||
|
||||
// Form defines a plural form.
|
||||
//
|
||||
// Not all languages support all forms. Also, the meaning of each form varies
|
||||
// per language. It is important to note that the name of a form does not
|
||||
// necessarily correspond one-to-one with the set of numbers. For instance,
|
||||
// for Croation, One matches not only 1, but also 11, 21, etc.
|
||||
//
|
||||
// Each language must at least support the form "other".
|
||||
type Form byte
|
||||
|
||||
const (
|
||||
Other Form = iota
|
||||
Zero
|
||||
One
|
||||
Two
|
||||
Few
|
||||
Many
|
||||
)
|
||||
|
||||
var countMap = map[string]Form{
|
||||
"other": Other,
|
||||
"zero": Zero,
|
||||
"one": One,
|
||||
"two": Two,
|
||||
"few": Few,
|
||||
"many": Many,
|
||||
}
|
||||
|
||||
type pluralCheck struct {
|
||||
// category:
|
||||
// 3..7: opID
|
||||
// 0..2: category
|
||||
cat byte
|
||||
setID byte
|
||||
}
|
||||
|
||||
// opID identifies the type of operand in the plural rule, being i, n or f.
|
||||
// (v, w, and t are treated as filters in our implementation.)
|
||||
type opID byte
|
||||
|
||||
const (
|
||||
opMod opID = 0x1 // is '%' used?
|
||||
opNotEqual opID = 0x2 // using "!=" to compare
|
||||
opI opID = 0 << 2 // integers after taking the absolute value
|
||||
opN opID = 1 << 2 // full number (must be integer)
|
||||
opF opID = 2 << 2 // fraction
|
||||
opV opID = 3 << 2 // number of visible digits
|
||||
opW opID = 4 << 2 // number of visible digits without trailing zeros
|
||||
opBretonM opID = 5 << 2 // hard-wired rule for Breton
|
||||
opItalian800 opID = 6 << 2 // hard-wired rule for Italian
|
||||
opAzerbaijan00s opID = 7 << 2 // hard-wired rule for Azerbaijan
|
||||
)
|
||||
const (
|
||||
// Use this plural form to indicate the next rule needs to match as well.
|
||||
// The last condition in the list will have the correct plural form.
|
||||
andNext = 0x7
|
||||
formMask = 0x7
|
||||
|
||||
opShift = 3
|
||||
|
||||
// numN indicates the maximum integer, or maximum mod value, for which we
|
||||
// have inclusion masks.
|
||||
numN = 100
|
||||
// The common denominator of the modulo that is taken.
|
||||
maxMod = 100
|
||||
)
|
||||
244
vendor/golang.org/x/text/feature/plural/message.go
generated
vendored
Normal file
244
vendor/golang.org/x/text/feature/plural/message.go
generated
vendored
Normal file
|
|
@ -0,0 +1,244 @@
|
|||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package plural
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"strconv"
|
||||
|
||||
"golang.org/x/text/internal/catmsg"
|
||||
"golang.org/x/text/internal/number"
|
||||
"golang.org/x/text/language"
|
||||
"golang.org/x/text/message/catalog"
|
||||
)
|
||||
|
||||
// TODO: consider deleting this interface. Maybe VisibleDigits is always
|
||||
// sufficient and practical.
|
||||
|
||||
// Interface is used for types that can determine their own plural form.
|
||||
type Interface interface {
|
||||
// PluralForm reports the plural form for the given language of the
|
||||
// underlying value. It also returns the integer value. If the integer value
|
||||
// is larger than fits in n, PluralForm may return a value modulo
|
||||
// 10,000,000.
|
||||
PluralForm(t language.Tag, scale int) (f Form, n int)
|
||||
}
|
||||
|
||||
// Selectf returns the first case for which its selector is a match for the
|
||||
// arg-th substitution argument to a formatting call, formatting it as indicated
|
||||
// by format.
|
||||
//
|
||||
// The cases argument are pairs of selectors and messages. Selectors are of type
|
||||
// string or Form. Messages are of type string or catalog.Message. A selector
|
||||
// matches an argument if:
|
||||
// - it is "other" or Other
|
||||
// - it matches the plural form of the argument: "zero", "one", "two", "few",
|
||||
// or "many", or the equivalent Form
|
||||
// - it is of the form "=x" where x is an integer that matches the value of
|
||||
// the argument.
|
||||
// - it is of the form "<x" where x is an integer that is larger than the
|
||||
// argument.
|
||||
//
|
||||
// The format argument determines the formatting parameters for which to
|
||||
// determine the plural form. This is especially relevant for non-integer
|
||||
// values.
|
||||
//
|
||||
// The format string may be "", in which case a best-effort attempt is made to
|
||||
// find a reasonable representation on which to base the plural form. Examples
|
||||
// of format strings are:
|
||||
// - %.2f decimal with scale 2
|
||||
// - %.2e scientific notation with precision 3 (scale + 1)
|
||||
// - %d integer
|
||||
func Selectf(arg int, format string, cases ...interface{}) catalog.Message {
|
||||
var p parser
|
||||
// Intercept the formatting parameters of format by doing a dummy print.
|
||||
fmt.Fprintf(io.Discard, format, &p)
|
||||
m := &message{arg, kindDefault, 0, cases}
|
||||
switch p.verb {
|
||||
case 'g':
|
||||
m.kind = kindPrecision
|
||||
m.scale = p.scale
|
||||
case 'f':
|
||||
m.kind = kindScale
|
||||
m.scale = p.scale
|
||||
case 'e':
|
||||
m.kind = kindScientific
|
||||
m.scale = p.scale
|
||||
case 'd':
|
||||
m.kind = kindScale
|
||||
m.scale = 0
|
||||
default:
|
||||
// TODO: do we need to handle errors?
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
type parser struct {
|
||||
verb rune
|
||||
scale int
|
||||
}
|
||||
|
||||
func (p *parser) Format(s fmt.State, verb rune) {
|
||||
p.verb = verb
|
||||
p.scale = -1
|
||||
if prec, ok := s.Precision(); ok {
|
||||
p.scale = prec
|
||||
}
|
||||
}
|
||||
|
||||
type message struct {
|
||||
arg int
|
||||
kind int
|
||||
scale int
|
||||
cases []interface{}
|
||||
}
|
||||
|
||||
const (
|
||||
// Start with non-ASCII to allow skipping values.
|
||||
kindDefault = 0x80 + iota
|
||||
kindScale // verb f, number of fraction digits follows
|
||||
kindScientific // verb e, number of fraction digits follows
|
||||
kindPrecision // verb g, number of significant digits follows
|
||||
)
|
||||
|
||||
var handle = catmsg.Register("golang.org/x/text/feature/plural:plural", execute)
|
||||
|
||||
func (m *message) Compile(e *catmsg.Encoder) error {
|
||||
e.EncodeMessageType(handle)
|
||||
|
||||
e.EncodeUint(uint64(m.arg))
|
||||
|
||||
e.EncodeUint(uint64(m.kind))
|
||||
if m.kind > kindDefault {
|
||||
e.EncodeUint(uint64(m.scale))
|
||||
}
|
||||
|
||||
forms := validForms(cardinal, e.Language())
|
||||
|
||||
for i := 0; i < len(m.cases); {
|
||||
if err := compileSelector(e, forms, m.cases[i]); err != nil {
|
||||
return err
|
||||
}
|
||||
if i++; i >= len(m.cases) {
|
||||
return fmt.Errorf("plural: no message defined for selector %v", m.cases[i-1])
|
||||
}
|
||||
var msg catalog.Message
|
||||
switch x := m.cases[i].(type) {
|
||||
case string:
|
||||
msg = catalog.String(x)
|
||||
case catalog.Message:
|
||||
msg = x
|
||||
default:
|
||||
return fmt.Errorf("plural: message of type %T; must be string or catalog.Message", x)
|
||||
}
|
||||
if err := e.EncodeMessage(msg); err != nil {
|
||||
return err
|
||||
}
|
||||
i++
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func compileSelector(e *catmsg.Encoder, valid []Form, selector interface{}) error {
|
||||
form := Other
|
||||
switch x := selector.(type) {
|
||||
case string:
|
||||
if x == "" {
|
||||
return fmt.Errorf("plural: empty selector")
|
||||
}
|
||||
if c := x[0]; c == '=' || c == '<' {
|
||||
val, err := strconv.ParseUint(x[1:], 10, 16)
|
||||
if err != nil {
|
||||
return fmt.Errorf("plural: invalid number in selector %q: %v", selector, err)
|
||||
}
|
||||
e.EncodeUint(uint64(c))
|
||||
e.EncodeUint(val)
|
||||
return nil
|
||||
}
|
||||
var ok bool
|
||||
form, ok = countMap[x]
|
||||
if !ok {
|
||||
return fmt.Errorf("plural: invalid plural form %q", selector)
|
||||
}
|
||||
case Form:
|
||||
form = x
|
||||
default:
|
||||
return fmt.Errorf("plural: selector of type %T; want string or Form", selector)
|
||||
}
|
||||
|
||||
ok := false
|
||||
for _, f := range valid {
|
||||
if f == form {
|
||||
ok = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !ok {
|
||||
return fmt.Errorf("plural: form %q not supported for language %q", selector, e.Language())
|
||||
}
|
||||
e.EncodeUint(uint64(form))
|
||||
return nil
|
||||
}
|
||||
|
||||
func execute(d *catmsg.Decoder) bool {
|
||||
lang := d.Language()
|
||||
argN := int(d.DecodeUint())
|
||||
kind := int(d.DecodeUint())
|
||||
scale := -1 // default
|
||||
if kind > kindDefault {
|
||||
scale = int(d.DecodeUint())
|
||||
}
|
||||
form := Other
|
||||
n := -1
|
||||
if arg := d.Arg(argN); arg == nil {
|
||||
// Default to Other.
|
||||
} else if x, ok := arg.(number.VisibleDigits); ok {
|
||||
d := x.Digits(nil, lang, scale)
|
||||
form, n = cardinal.matchDisplayDigits(lang, &d)
|
||||
} else if x, ok := arg.(Interface); ok {
|
||||
// This covers lists and formatters from the number package.
|
||||
form, n = x.PluralForm(lang, scale)
|
||||
} else {
|
||||
var f number.Formatter
|
||||
switch kind {
|
||||
case kindScale:
|
||||
f.InitDecimal(lang)
|
||||
f.SetScale(scale)
|
||||
case kindScientific:
|
||||
f.InitScientific(lang)
|
||||
f.SetScale(scale)
|
||||
case kindPrecision:
|
||||
f.InitDecimal(lang)
|
||||
f.SetPrecision(scale)
|
||||
case kindDefault:
|
||||
// sensible default
|
||||
f.InitDecimal(lang)
|
||||
if k := reflect.TypeOf(arg).Kind(); reflect.Int <= k && k <= reflect.Uintptr {
|
||||
f.SetScale(0)
|
||||
} else {
|
||||
f.SetScale(2)
|
||||
}
|
||||
}
|
||||
var dec number.Decimal // TODO: buffer in Printer
|
||||
dec.Convert(f.RoundingContext, arg)
|
||||
v := number.FormatDigits(&dec, f.RoundingContext)
|
||||
if !v.NaN && !v.Inf {
|
||||
form, n = cardinal.matchDisplayDigits(d.Language(), &v)
|
||||
}
|
||||
}
|
||||
for !d.Done() {
|
||||
f := d.DecodeUint()
|
||||
if (f == '=' && n == int(d.DecodeUint())) ||
|
||||
(f == '<' && 0 <= n && n < int(d.DecodeUint())) ||
|
||||
form == Form(f) ||
|
||||
Other == Form(f) {
|
||||
return d.ExecuteMessage()
|
||||
}
|
||||
d.SkipMessage()
|
||||
}
|
||||
return false
|
||||
}
|
||||
262
vendor/golang.org/x/text/feature/plural/plural.go
generated
vendored
Normal file
262
vendor/golang.org/x/text/feature/plural/plural.go
generated
vendored
Normal file
|
|
@ -0,0 +1,262 @@
|
|||
// Copyright 2016 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:generate go run gen.go gen_common.go
|
||||
|
||||
// Package plural provides utilities for handling linguistic plurals in text.
|
||||
//
|
||||
// The definitions in this package are based on the plural rule handling defined
|
||||
// in CLDR. See
|
||||
// https://unicode.org/reports/tr35/tr35-numbers.html#Language_Plural_Rules for
|
||||
// details.
|
||||
package plural
|
||||
|
||||
import (
|
||||
"golang.org/x/text/internal/language/compact"
|
||||
"golang.org/x/text/internal/number"
|
||||
"golang.org/x/text/language"
|
||||
)
|
||||
|
||||
// Rules defines the plural rules for all languages for a certain plural type.
|
||||
//
|
||||
// This package is UNDER CONSTRUCTION and its API may change.
|
||||
type Rules struct {
|
||||
rules []pluralCheck
|
||||
index []byte
|
||||
langToIndex []byte
|
||||
inclusionMasks []uint64
|
||||
}
|
||||
|
||||
var (
|
||||
// Cardinal defines the plural rules for numbers indicating quantities.
|
||||
Cardinal *Rules = cardinal
|
||||
|
||||
// Ordinal defines the plural rules for numbers indicating position
|
||||
// (first, second, etc.).
|
||||
Ordinal *Rules = ordinal
|
||||
|
||||
ordinal = &Rules{
|
||||
ordinalRules,
|
||||
ordinalIndex,
|
||||
ordinalLangToIndex,
|
||||
ordinalInclusionMasks[:],
|
||||
}
|
||||
|
||||
cardinal = &Rules{
|
||||
cardinalRules,
|
||||
cardinalIndex,
|
||||
cardinalLangToIndex,
|
||||
cardinalInclusionMasks[:],
|
||||
}
|
||||
)
|
||||
|
||||
// getIntApprox converts the digits in slice digits[start:end] to an integer
|
||||
// according to the following rules:
|
||||
// - Let i be asInt(digits[start:end]), where out-of-range digits are assumed
|
||||
// to be zero.
|
||||
// - Result n is big if i / 10^nMod > 1.
|
||||
// - Otherwise the result is i % 10^nMod.
|
||||
//
|
||||
// For example, if digits is {1, 2, 3} and start:end is 0:5, then the result
|
||||
// for various values of nMod is:
|
||||
// - when nMod == 2, n == big
|
||||
// - when nMod == 3, n == big
|
||||
// - when nMod == 4, n == big
|
||||
// - when nMod == 5, n == 12300
|
||||
// - when nMod == 6, n == 12300
|
||||
// - when nMod == 7, n == 12300
|
||||
func getIntApprox(digits []byte, start, end, nMod, big int) (n int) {
|
||||
// Leading 0 digits just result in 0.
|
||||
p := start
|
||||
if p < 0 {
|
||||
p = 0
|
||||
}
|
||||
// Range only over the part for which we have digits.
|
||||
mid := end
|
||||
if mid >= len(digits) {
|
||||
mid = len(digits)
|
||||
}
|
||||
// Check digits more significant that nMod.
|
||||
if q := end - nMod; q > 0 {
|
||||
if q > mid {
|
||||
q = mid
|
||||
}
|
||||
for ; p < q; p++ {
|
||||
if digits[p] != 0 {
|
||||
return big
|
||||
}
|
||||
}
|
||||
}
|
||||
for ; p < mid; p++ {
|
||||
n = 10*n + int(digits[p])
|
||||
}
|
||||
// Multiply for trailing zeros.
|
||||
for ; p < end; p++ {
|
||||
n *= 10
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// MatchDigits computes the plural form for the given language and the given
|
||||
// decimal floating point digits. The digits are stored in big-endian order and
|
||||
// are of value byte(0) - byte(9). The floating point position is indicated by
|
||||
// exp and the number of visible decimals is scale. All leading and trailing
|
||||
// zeros may be omitted from digits.
|
||||
//
|
||||
// The following table contains examples of possible arguments to represent
|
||||
// the given numbers.
|
||||
//
|
||||
// decimal digits exp scale
|
||||
// 123 []byte{1, 2, 3} 3 0
|
||||
// 123.4 []byte{1, 2, 3, 4} 3 1
|
||||
// 123.40 []byte{1, 2, 3, 4} 3 2
|
||||
// 100000 []byte{1} 6 0
|
||||
// 100000.00 []byte{1} 6 3
|
||||
func (p *Rules) MatchDigits(t language.Tag, digits []byte, exp, scale int) Form {
|
||||
index := tagToID(t)
|
||||
|
||||
// Differentiate up to including mod 1000000 for the integer part.
|
||||
n := getIntApprox(digits, 0, exp, 6, 1000000)
|
||||
|
||||
// Differentiate up to including mod 100 for the fractional part.
|
||||
f := getIntApprox(digits, exp, exp+scale, 2, 100)
|
||||
|
||||
return matchPlural(p, index, n, f, scale)
|
||||
}
|
||||
|
||||
func (p *Rules) matchDisplayDigits(t language.Tag, d *number.Digits) (Form, int) {
|
||||
n := getIntApprox(d.Digits, 0, int(d.Exp), 6, 1000000)
|
||||
return p.MatchDigits(t, d.Digits, int(d.Exp), d.NumFracDigits()), n
|
||||
}
|
||||
|
||||
func validForms(p *Rules, t language.Tag) (forms []Form) {
|
||||
offset := p.langToIndex[tagToID(t)]
|
||||
rules := p.rules[p.index[offset]:p.index[offset+1]]
|
||||
|
||||
forms = append(forms, Other)
|
||||
last := Other
|
||||
for _, r := range rules {
|
||||
if cat := Form(r.cat & formMask); cat != andNext && last != cat {
|
||||
forms = append(forms, cat)
|
||||
last = cat
|
||||
}
|
||||
}
|
||||
return forms
|
||||
}
|
||||
|
||||
func (p *Rules) matchComponents(t language.Tag, n, f, scale int) Form {
|
||||
return matchPlural(p, tagToID(t), n, f, scale)
|
||||
}
|
||||
|
||||
// MatchPlural returns the plural form for the given language and plural
|
||||
// operands (as defined in
|
||||
// https://unicode.org/reports/tr35/tr35-numbers.html#Language_Plural_Rules):
|
||||
//
|
||||
// where
|
||||
// n absolute value of the source number (integer and decimals)
|
||||
// input
|
||||
// i integer digits of n.
|
||||
// v number of visible fraction digits in n, with trailing zeros.
|
||||
// w number of visible fraction digits in n, without trailing zeros.
|
||||
// f visible fractional digits in n, with trailing zeros (f = t * 10^(v-w))
|
||||
// t visible fractional digits in n, without trailing zeros.
|
||||
//
|
||||
// If any of the operand values is too large to fit in an int, it is okay to
|
||||
// pass the value modulo 10,000,000.
|
||||
func (p *Rules) MatchPlural(lang language.Tag, i, v, w, f, t int) Form {
|
||||
return matchPlural(p, tagToID(lang), i, f, v)
|
||||
}
|
||||
|
||||
func matchPlural(p *Rules, index compact.ID, n, f, v int) Form {
|
||||
nMask := p.inclusionMasks[n%maxMod]
|
||||
// Compute the fMask inline in the rules below, as it is relatively rare.
|
||||
// fMask := p.inclusionMasks[f%maxMod]
|
||||
vMask := p.inclusionMasks[v%maxMod]
|
||||
|
||||
// Do the matching
|
||||
offset := p.langToIndex[index]
|
||||
rules := p.rules[p.index[offset]:p.index[offset+1]]
|
||||
for i := 0; i < len(rules); i++ {
|
||||
rule := rules[i]
|
||||
setBit := uint64(1 << rule.setID)
|
||||
var skip bool
|
||||
switch op := opID(rule.cat >> opShift); op {
|
||||
case opI: // i = x
|
||||
skip = n >= numN || nMask&setBit == 0
|
||||
|
||||
case opI | opNotEqual: // i != x
|
||||
skip = n < numN && nMask&setBit != 0
|
||||
|
||||
case opI | opMod: // i % m = x
|
||||
skip = nMask&setBit == 0
|
||||
|
||||
case opI | opMod | opNotEqual: // i % m != x
|
||||
skip = nMask&setBit != 0
|
||||
|
||||
case opN: // n = x
|
||||
skip = f != 0 || n >= numN || nMask&setBit == 0
|
||||
|
||||
case opN | opNotEqual: // n != x
|
||||
skip = f == 0 && n < numN && nMask&setBit != 0
|
||||
|
||||
case opN | opMod: // n % m = x
|
||||
skip = f != 0 || nMask&setBit == 0
|
||||
|
||||
case opN | opMod | opNotEqual: // n % m != x
|
||||
skip = f == 0 && nMask&setBit != 0
|
||||
|
||||
case opF: // f = x
|
||||
skip = f >= numN || p.inclusionMasks[f%maxMod]&setBit == 0
|
||||
|
||||
case opF | opNotEqual: // f != x
|
||||
skip = f < numN && p.inclusionMasks[f%maxMod]&setBit != 0
|
||||
|
||||
case opF | opMod: // f % m = x
|
||||
skip = p.inclusionMasks[f%maxMod]&setBit == 0
|
||||
|
||||
case opF | opMod | opNotEqual: // f % m != x
|
||||
skip = p.inclusionMasks[f%maxMod]&setBit != 0
|
||||
|
||||
case opV: // v = x
|
||||
skip = v < numN && vMask&setBit == 0
|
||||
|
||||
case opV | opNotEqual: // v != x
|
||||
skip = v < numN && vMask&setBit != 0
|
||||
|
||||
case opW: // w == 0
|
||||
skip = f != 0
|
||||
|
||||
case opW | opNotEqual: // w != 0
|
||||
skip = f == 0
|
||||
|
||||
// Hard-wired rules that cannot be handled by our algorithm.
|
||||
|
||||
case opBretonM:
|
||||
skip = f != 0 || n == 0 || n%1000000 != 0
|
||||
|
||||
case opAzerbaijan00s:
|
||||
// 100,200,300,400,500,600,700,800,900
|
||||
skip = n == 0 || n >= 1000 || n%100 != 0
|
||||
|
||||
case opItalian800:
|
||||
skip = (f != 0 || n >= numN || nMask&setBit == 0) && n != 800
|
||||
}
|
||||
if skip {
|
||||
// advance over AND entries.
|
||||
for ; i < len(rules) && rules[i].cat&formMask == andNext; i++ {
|
||||
}
|
||||
continue
|
||||
}
|
||||
// return if we have a final entry.
|
||||
if cat := rule.cat & formMask; cat != andNext {
|
||||
return Form(cat)
|
||||
}
|
||||
}
|
||||
return Other
|
||||
}
|
||||
|
||||
func tagToID(t language.Tag) compact.ID {
|
||||
id, _ := compact.RegionalID(compact.Tag(t))
|
||||
return id
|
||||
}
|
||||
552
vendor/golang.org/x/text/feature/plural/tables.go
generated
vendored
Normal file
552
vendor/golang.org/x/text/feature/plural/tables.go
generated
vendored
Normal file
|
|
@ -0,0 +1,552 @@
|
|||
// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
|
||||
|
||||
package plural
|
||||
|
||||
// CLDRVersion is the CLDR version from which the tables in this package are derived.
|
||||
const CLDRVersion = "32"
|
||||
|
||||
var ordinalRules = []pluralCheck{ // 64 elements
|
||||
0: {cat: 0x2f, setID: 0x4},
|
||||
1: {cat: 0x3a, setID: 0x5},
|
||||
2: {cat: 0x22, setID: 0x1},
|
||||
3: {cat: 0x22, setID: 0x6},
|
||||
4: {cat: 0x22, setID: 0x7},
|
||||
5: {cat: 0x2f, setID: 0x8},
|
||||
6: {cat: 0x3c, setID: 0x9},
|
||||
7: {cat: 0x2f, setID: 0xa},
|
||||
8: {cat: 0x3c, setID: 0xb},
|
||||
9: {cat: 0x2c, setID: 0xc},
|
||||
10: {cat: 0x24, setID: 0xd},
|
||||
11: {cat: 0x2d, setID: 0xe},
|
||||
12: {cat: 0x2d, setID: 0xf},
|
||||
13: {cat: 0x2f, setID: 0x10},
|
||||
14: {cat: 0x35, setID: 0x3},
|
||||
15: {cat: 0xc5, setID: 0x11},
|
||||
16: {cat: 0x2, setID: 0x1},
|
||||
17: {cat: 0x5, setID: 0x3},
|
||||
18: {cat: 0xd, setID: 0x12},
|
||||
19: {cat: 0x22, setID: 0x1},
|
||||
20: {cat: 0x2f, setID: 0x13},
|
||||
21: {cat: 0x3d, setID: 0x14},
|
||||
22: {cat: 0x2f, setID: 0x15},
|
||||
23: {cat: 0x3a, setID: 0x16},
|
||||
24: {cat: 0x2f, setID: 0x17},
|
||||
25: {cat: 0x3b, setID: 0x18},
|
||||
26: {cat: 0x2f, setID: 0xa},
|
||||
27: {cat: 0x3c, setID: 0xb},
|
||||
28: {cat: 0x22, setID: 0x1},
|
||||
29: {cat: 0x23, setID: 0x19},
|
||||
30: {cat: 0x24, setID: 0x1a},
|
||||
31: {cat: 0x22, setID: 0x1b},
|
||||
32: {cat: 0x23, setID: 0x2},
|
||||
33: {cat: 0x24, setID: 0x1a},
|
||||
34: {cat: 0xf, setID: 0x15},
|
||||
35: {cat: 0x1a, setID: 0x16},
|
||||
36: {cat: 0xf, setID: 0x17},
|
||||
37: {cat: 0x1b, setID: 0x18},
|
||||
38: {cat: 0xf, setID: 0x1c},
|
||||
39: {cat: 0x1d, setID: 0x1d},
|
||||
40: {cat: 0xa, setID: 0x1e},
|
||||
41: {cat: 0xa, setID: 0x1f},
|
||||
42: {cat: 0xc, setID: 0x20},
|
||||
43: {cat: 0xe4, setID: 0x0},
|
||||
44: {cat: 0x5, setID: 0x3},
|
||||
45: {cat: 0xd, setID: 0xe},
|
||||
46: {cat: 0xd, setID: 0x21},
|
||||
47: {cat: 0x22, setID: 0x1},
|
||||
48: {cat: 0x23, setID: 0x19},
|
||||
49: {cat: 0x24, setID: 0x1a},
|
||||
50: {cat: 0x25, setID: 0x22},
|
||||
51: {cat: 0x22, setID: 0x23},
|
||||
52: {cat: 0x23, setID: 0x19},
|
||||
53: {cat: 0x24, setID: 0x1a},
|
||||
54: {cat: 0x25, setID: 0x22},
|
||||
55: {cat: 0x22, setID: 0x24},
|
||||
56: {cat: 0x23, setID: 0x19},
|
||||
57: {cat: 0x24, setID: 0x1a},
|
||||
58: {cat: 0x25, setID: 0x22},
|
||||
59: {cat: 0x21, setID: 0x25},
|
||||
60: {cat: 0x22, setID: 0x1},
|
||||
61: {cat: 0x23, setID: 0x2},
|
||||
62: {cat: 0x24, setID: 0x26},
|
||||
63: {cat: 0x25, setID: 0x27},
|
||||
} // Size: 152 bytes
|
||||
|
||||
var ordinalIndex = []uint8{ // 22 elements
|
||||
0x00, 0x00, 0x02, 0x03, 0x04, 0x05, 0x07, 0x09,
|
||||
0x0b, 0x0f, 0x10, 0x13, 0x16, 0x1c, 0x1f, 0x22,
|
||||
0x28, 0x2f, 0x33, 0x37, 0x3b, 0x40,
|
||||
} // Size: 46 bytes
|
||||
|
||||
var ordinalLangToIndex = []uint8{ // 775 elements
|
||||
// Entry 0 - 3F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x12, 0x12, 0x00, 0x00, 0x00, 0x00, 0x10, 0x10,
|
||||
0x10, 0x10, 0x10, 0x00, 0x00, 0x05, 0x05, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 40 - 7F
|
||||
0x12, 0x12, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e,
|
||||
0x0e, 0x0e, 0x0e, 0x0e, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x14, 0x14, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 80 - BF
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
// Entry C0 - FF
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
|
||||
0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 100 - 13F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02,
|
||||
0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
// Entry 140 - 17F
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x11, 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11,
|
||||
0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x03,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 180 - 1BF
|
||||
0x00, 0x00, 0x00, 0x00, 0x09, 0x09, 0x09, 0x09,
|
||||
0x09, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0x0a, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x08, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 1C0 - 1FF
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x0f, 0x0f, 0x00, 0x00,
|
||||
0x00, 0x00, 0x02, 0x0d, 0x0d, 0x02, 0x02, 0x02,
|
||||
0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 200 - 23F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x04, 0x04, 0x04, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x13, 0x13, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 240 - 27F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 280 - 2BF
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x0b, 0x0b, 0x0b, 0x0b, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x01,
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x07, 0x07, 0x02, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 2C0 - 2FF
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x06, 0x06, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x02, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 300 - 33F
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x0c,
|
||||
} // Size: 799 bytes
|
||||
|
||||
var ordinalInclusionMasks = []uint64{ // 100 elements
|
||||
// Entry 0 - 1F
|
||||
0x0000002000010009, 0x00000018482000d3, 0x0000000042840195, 0x000000410a040581,
|
||||
0x00000041040c0081, 0x0000009840040041, 0x0000008400045001, 0x0000003850040001,
|
||||
0x0000003850060001, 0x0000003800049001, 0x0000000800052001, 0x0000000040660031,
|
||||
0x0000000041840331, 0x0000000100040f01, 0x00000001001c0001, 0x0000000040040001,
|
||||
0x0000000000045001, 0x0000000070040001, 0x0000000070040001, 0x0000000000049001,
|
||||
0x0000000080050001, 0x0000000040200011, 0x0000000040800111, 0x0000000100000501,
|
||||
0x0000000100080001, 0x0000000040000001, 0x0000000000005001, 0x0000000050000001,
|
||||
0x0000000050000001, 0x0000000000009001, 0x0000000000010001, 0x0000000040200011,
|
||||
// Entry 20 - 3F
|
||||
0x0000000040800111, 0x0000000100000501, 0x0000000100080001, 0x0000000040000001,
|
||||
0x0000000000005001, 0x0000000050000001, 0x0000000050000001, 0x0000000000009001,
|
||||
0x0000000200050001, 0x0000000040200011, 0x0000000040800111, 0x0000000100000501,
|
||||
0x0000000100080001, 0x0000000040000001, 0x0000000000005001, 0x0000000050000001,
|
||||
0x0000000050000001, 0x0000000000009001, 0x0000000080010001, 0x0000000040200011,
|
||||
0x0000000040800111, 0x0000000100000501, 0x0000000100080001, 0x0000000040000001,
|
||||
0x0000000000005001, 0x0000000050000001, 0x0000000050000001, 0x0000000000009001,
|
||||
0x0000000200050001, 0x0000000040200011, 0x0000000040800111, 0x0000000100000501,
|
||||
// Entry 40 - 5F
|
||||
0x0000000100080001, 0x0000000040000001, 0x0000000000005001, 0x0000000050000001,
|
||||
0x0000000050000001, 0x0000000000009001, 0x0000000080010001, 0x0000000040200011,
|
||||
0x0000000040800111, 0x0000000100000501, 0x0000000100080001, 0x0000000040000001,
|
||||
0x0000000000005001, 0x0000000050000001, 0x0000000050000001, 0x0000000000009001,
|
||||
0x0000000080070001, 0x0000000040200011, 0x0000000040800111, 0x0000000100000501,
|
||||
0x0000000100080001, 0x0000000040000001, 0x0000000000005001, 0x0000000050000001,
|
||||
0x0000000050000001, 0x0000000000009001, 0x0000000200010001, 0x0000000040200011,
|
||||
0x0000000040800111, 0x0000000100000501, 0x0000000100080001, 0x0000000040000001,
|
||||
// Entry 60 - 7F
|
||||
0x0000000000005001, 0x0000000050000001, 0x0000000050000001, 0x0000000000009001,
|
||||
} // Size: 824 bytes
|
||||
|
||||
// Slots used for ordinal: 40 of 0xFF rules; 16 of 0xFF indexes; 40 of 64 sets
|
||||
|
||||
var cardinalRules = []pluralCheck{ // 166 elements
|
||||
0: {cat: 0x2, setID: 0x3},
|
||||
1: {cat: 0x22, setID: 0x1},
|
||||
2: {cat: 0x2, setID: 0x4},
|
||||
3: {cat: 0x2, setID: 0x4},
|
||||
4: {cat: 0x7, setID: 0x1},
|
||||
5: {cat: 0x62, setID: 0x3},
|
||||
6: {cat: 0x22, setID: 0x4},
|
||||
7: {cat: 0x7, setID: 0x3},
|
||||
8: {cat: 0x42, setID: 0x1},
|
||||
9: {cat: 0x22, setID: 0x4},
|
||||
10: {cat: 0x22, setID: 0x4},
|
||||
11: {cat: 0x22, setID: 0x5},
|
||||
12: {cat: 0x22, setID: 0x1},
|
||||
13: {cat: 0x22, setID: 0x1},
|
||||
14: {cat: 0x7, setID: 0x4},
|
||||
15: {cat: 0x92, setID: 0x3},
|
||||
16: {cat: 0xf, setID: 0x6},
|
||||
17: {cat: 0x1f, setID: 0x7},
|
||||
18: {cat: 0x82, setID: 0x3},
|
||||
19: {cat: 0x92, setID: 0x3},
|
||||
20: {cat: 0xf, setID: 0x6},
|
||||
21: {cat: 0x62, setID: 0x3},
|
||||
22: {cat: 0x4a, setID: 0x6},
|
||||
23: {cat: 0x7, setID: 0x8},
|
||||
24: {cat: 0x62, setID: 0x3},
|
||||
25: {cat: 0x1f, setID: 0x9},
|
||||
26: {cat: 0x62, setID: 0x3},
|
||||
27: {cat: 0x5f, setID: 0x9},
|
||||
28: {cat: 0x72, setID: 0x3},
|
||||
29: {cat: 0x29, setID: 0xa},
|
||||
30: {cat: 0x29, setID: 0xb},
|
||||
31: {cat: 0x4f, setID: 0xb},
|
||||
32: {cat: 0x61, setID: 0x2},
|
||||
33: {cat: 0x2f, setID: 0x6},
|
||||
34: {cat: 0x3a, setID: 0x7},
|
||||
35: {cat: 0x4f, setID: 0x6},
|
||||
36: {cat: 0x5f, setID: 0x7},
|
||||
37: {cat: 0x62, setID: 0x2},
|
||||
38: {cat: 0x4f, setID: 0x6},
|
||||
39: {cat: 0x72, setID: 0x2},
|
||||
40: {cat: 0x21, setID: 0x3},
|
||||
41: {cat: 0x7, setID: 0x4},
|
||||
42: {cat: 0x32, setID: 0x3},
|
||||
43: {cat: 0x21, setID: 0x3},
|
||||
44: {cat: 0x22, setID: 0x1},
|
||||
45: {cat: 0x22, setID: 0x1},
|
||||
46: {cat: 0x23, setID: 0x2},
|
||||
47: {cat: 0x2, setID: 0x3},
|
||||
48: {cat: 0x22, setID: 0x1},
|
||||
49: {cat: 0x24, setID: 0xc},
|
||||
50: {cat: 0x7, setID: 0x1},
|
||||
51: {cat: 0x62, setID: 0x3},
|
||||
52: {cat: 0x74, setID: 0x3},
|
||||
53: {cat: 0x24, setID: 0x3},
|
||||
54: {cat: 0x2f, setID: 0xd},
|
||||
55: {cat: 0x34, setID: 0x1},
|
||||
56: {cat: 0xf, setID: 0x6},
|
||||
57: {cat: 0x1f, setID: 0x7},
|
||||
58: {cat: 0x62, setID: 0x3},
|
||||
59: {cat: 0x4f, setID: 0x6},
|
||||
60: {cat: 0x5a, setID: 0x7},
|
||||
61: {cat: 0xf, setID: 0xe},
|
||||
62: {cat: 0x1f, setID: 0xf},
|
||||
63: {cat: 0x64, setID: 0x3},
|
||||
64: {cat: 0x4f, setID: 0xe},
|
||||
65: {cat: 0x5c, setID: 0xf},
|
||||
66: {cat: 0x22, setID: 0x10},
|
||||
67: {cat: 0x23, setID: 0x11},
|
||||
68: {cat: 0x24, setID: 0x12},
|
||||
69: {cat: 0xf, setID: 0x1},
|
||||
70: {cat: 0x62, setID: 0x3},
|
||||
71: {cat: 0xf, setID: 0x2},
|
||||
72: {cat: 0x63, setID: 0x3},
|
||||
73: {cat: 0xf, setID: 0x13},
|
||||
74: {cat: 0x64, setID: 0x3},
|
||||
75: {cat: 0x74, setID: 0x3},
|
||||
76: {cat: 0xf, setID: 0x1},
|
||||
77: {cat: 0x62, setID: 0x3},
|
||||
78: {cat: 0x4a, setID: 0x1},
|
||||
79: {cat: 0xf, setID: 0x2},
|
||||
80: {cat: 0x63, setID: 0x3},
|
||||
81: {cat: 0x4b, setID: 0x2},
|
||||
82: {cat: 0xf, setID: 0x13},
|
||||
83: {cat: 0x64, setID: 0x3},
|
||||
84: {cat: 0x4c, setID: 0x13},
|
||||
85: {cat: 0x7, setID: 0x1},
|
||||
86: {cat: 0x62, setID: 0x3},
|
||||
87: {cat: 0x7, setID: 0x2},
|
||||
88: {cat: 0x63, setID: 0x3},
|
||||
89: {cat: 0x2f, setID: 0xa},
|
||||
90: {cat: 0x37, setID: 0x14},
|
||||
91: {cat: 0x65, setID: 0x3},
|
||||
92: {cat: 0x7, setID: 0x1},
|
||||
93: {cat: 0x62, setID: 0x3},
|
||||
94: {cat: 0x7, setID: 0x15},
|
||||
95: {cat: 0x64, setID: 0x3},
|
||||
96: {cat: 0x75, setID: 0x3},
|
||||
97: {cat: 0x7, setID: 0x1},
|
||||
98: {cat: 0x62, setID: 0x3},
|
||||
99: {cat: 0xf, setID: 0xe},
|
||||
100: {cat: 0x1f, setID: 0xf},
|
||||
101: {cat: 0x64, setID: 0x3},
|
||||
102: {cat: 0xf, setID: 0x16},
|
||||
103: {cat: 0x17, setID: 0x1},
|
||||
104: {cat: 0x65, setID: 0x3},
|
||||
105: {cat: 0xf, setID: 0x17},
|
||||
106: {cat: 0x65, setID: 0x3},
|
||||
107: {cat: 0xf, setID: 0xf},
|
||||
108: {cat: 0x65, setID: 0x3},
|
||||
109: {cat: 0x2f, setID: 0x6},
|
||||
110: {cat: 0x3a, setID: 0x7},
|
||||
111: {cat: 0x2f, setID: 0xe},
|
||||
112: {cat: 0x3c, setID: 0xf},
|
||||
113: {cat: 0x2d, setID: 0xa},
|
||||
114: {cat: 0x2d, setID: 0x17},
|
||||
115: {cat: 0x2d, setID: 0x18},
|
||||
116: {cat: 0x2f, setID: 0x6},
|
||||
117: {cat: 0x3a, setID: 0xb},
|
||||
118: {cat: 0x2f, setID: 0x19},
|
||||
119: {cat: 0x3c, setID: 0xb},
|
||||
120: {cat: 0x55, setID: 0x3},
|
||||
121: {cat: 0x22, setID: 0x1},
|
||||
122: {cat: 0x24, setID: 0x3},
|
||||
123: {cat: 0x2c, setID: 0xc},
|
||||
124: {cat: 0x2d, setID: 0xb},
|
||||
125: {cat: 0xf, setID: 0x6},
|
||||
126: {cat: 0x1f, setID: 0x7},
|
||||
127: {cat: 0x62, setID: 0x3},
|
||||
128: {cat: 0xf, setID: 0xe},
|
||||
129: {cat: 0x1f, setID: 0xf},
|
||||
130: {cat: 0x64, setID: 0x3},
|
||||
131: {cat: 0xf, setID: 0xa},
|
||||
132: {cat: 0x65, setID: 0x3},
|
||||
133: {cat: 0xf, setID: 0x17},
|
||||
134: {cat: 0x65, setID: 0x3},
|
||||
135: {cat: 0xf, setID: 0x18},
|
||||
136: {cat: 0x65, setID: 0x3},
|
||||
137: {cat: 0x2f, setID: 0x6},
|
||||
138: {cat: 0x3a, setID: 0x1a},
|
||||
139: {cat: 0x2f, setID: 0x1b},
|
||||
140: {cat: 0x3b, setID: 0x1c},
|
||||
141: {cat: 0x2f, setID: 0x1d},
|
||||
142: {cat: 0x3c, setID: 0x1e},
|
||||
143: {cat: 0x37, setID: 0x3},
|
||||
144: {cat: 0xa5, setID: 0x0},
|
||||
145: {cat: 0x22, setID: 0x1},
|
||||
146: {cat: 0x23, setID: 0x2},
|
||||
147: {cat: 0x24, setID: 0x1f},
|
||||
148: {cat: 0x25, setID: 0x20},
|
||||
149: {cat: 0xf, setID: 0x6},
|
||||
150: {cat: 0x62, setID: 0x3},
|
||||
151: {cat: 0xf, setID: 0x1b},
|
||||
152: {cat: 0x63, setID: 0x3},
|
||||
153: {cat: 0xf, setID: 0x21},
|
||||
154: {cat: 0x64, setID: 0x3},
|
||||
155: {cat: 0x75, setID: 0x3},
|
||||
156: {cat: 0x21, setID: 0x3},
|
||||
157: {cat: 0x22, setID: 0x1},
|
||||
158: {cat: 0x23, setID: 0x2},
|
||||
159: {cat: 0x2c, setID: 0x22},
|
||||
160: {cat: 0x2d, setID: 0x5},
|
||||
161: {cat: 0x21, setID: 0x3},
|
||||
162: {cat: 0x22, setID: 0x1},
|
||||
163: {cat: 0x23, setID: 0x2},
|
||||
164: {cat: 0x24, setID: 0x23},
|
||||
165: {cat: 0x25, setID: 0x24},
|
||||
} // Size: 356 bytes
|
||||
|
||||
var cardinalIndex = []uint8{ // 36 elements
|
||||
0x00, 0x00, 0x02, 0x03, 0x04, 0x06, 0x09, 0x0a,
|
||||
0x0c, 0x0d, 0x10, 0x14, 0x17, 0x1d, 0x28, 0x2b,
|
||||
0x2d, 0x2f, 0x32, 0x38, 0x42, 0x45, 0x4c, 0x55,
|
||||
0x5c, 0x61, 0x6d, 0x74, 0x79, 0x7d, 0x89, 0x91,
|
||||
0x95, 0x9c, 0xa1, 0xa6,
|
||||
} // Size: 60 bytes
|
||||
|
||||
var cardinalLangToIndex = []uint8{ // 775 elements
|
||||
// Entry 0 - 3F
|
||||
0x00, 0x08, 0x08, 0x08, 0x00, 0x00, 0x06, 0x06,
|
||||
0x01, 0x01, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21,
|
||||
0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21,
|
||||
0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21,
|
||||
0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21, 0x21,
|
||||
0x01, 0x01, 0x08, 0x08, 0x04, 0x04, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x00, 0x00, 0x1a, 0x1a, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x06, 0x00, 0x00,
|
||||
// Entry 40 - 7F
|
||||
0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x1e, 0x1e,
|
||||
0x08, 0x08, 0x13, 0x13, 0x13, 0x13, 0x13, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x00, 0x00, 0x00, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x18, 0x18, 0x00, 0x00, 0x22, 0x22, 0x09, 0x09,
|
||||
0x09, 0x00, 0x00, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x00, 0x00, 0x16, 0x16, 0x00,
|
||||
0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 80 - BF
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
// Entry C0 - FF
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
// Entry 100 - 13F
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x04, 0x04,
|
||||
0x08, 0x08, 0x00, 0x00, 0x01, 0x01, 0x01, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x04, 0x04, 0x0c, 0x0c,
|
||||
0x08, 0x08, 0x08, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
// Entry 140 - 17F
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
|
||||
0x02, 0x02, 0x08, 0x08, 0x04, 0x04, 0x1f, 0x1f,
|
||||
0x14, 0x14, 0x04, 0x04, 0x08, 0x08, 0x08, 0x08,
|
||||
0x01, 0x01, 0x06, 0x00, 0x00, 0x20, 0x20, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x17, 0x17, 0x01,
|
||||
0x01, 0x13, 0x13, 0x13, 0x16, 0x16, 0x08, 0x08,
|
||||
0x02, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 180 - 1BF
|
||||
0x00, 0x04, 0x0a, 0x0a, 0x04, 0x04, 0x04, 0x04,
|
||||
0x04, 0x10, 0x17, 0x00, 0x00, 0x00, 0x08, 0x08,
|
||||
0x04, 0x08, 0x08, 0x00, 0x00, 0x08, 0x08, 0x02,
|
||||
0x02, 0x08, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x00, 0x00, 0x00, 0x00, 0x01,
|
||||
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x08,
|
||||
0x08, 0x08, 0x00, 0x00, 0x0f, 0x0f, 0x08, 0x10,
|
||||
// Entry 1C0 - 1FF
|
||||
0x10, 0x08, 0x08, 0x0e, 0x0e, 0x08, 0x08, 0x08,
|
||||
0x08, 0x00, 0x00, 0x06, 0x06, 0x06, 0x06, 0x06,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x1b, 0x1b, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x0d, 0x0d, 0x08,
|
||||
0x08, 0x08, 0x00, 0x00, 0x00, 0x00, 0x06, 0x06,
|
||||
0x00, 0x00, 0x08, 0x08, 0x0b, 0x0b, 0x08, 0x08,
|
||||
0x08, 0x08, 0x12, 0x01, 0x01, 0x00, 0x00, 0x00,
|
||||
0x00, 0x1c, 0x1c, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 200 - 23F
|
||||
0x00, 0x08, 0x10, 0x10, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x00, 0x00, 0x00, 0x08, 0x08, 0x08, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x00,
|
||||
0x00, 0x08, 0x08, 0x08, 0x08, 0x08, 0x00, 0x08,
|
||||
0x06, 0x00, 0x00, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x06, 0x06,
|
||||
0x06, 0x06, 0x06, 0x08, 0x19, 0x19, 0x0d, 0x0d,
|
||||
0x08, 0x08, 0x03, 0x04, 0x03, 0x04, 0x04, 0x04,
|
||||
// Entry 240 - 27F
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x00,
|
||||
0x00, 0x00, 0x00, 0x08, 0x08, 0x00, 0x00, 0x12,
|
||||
0x12, 0x12, 0x08, 0x08, 0x1d, 0x1d, 0x1d, 0x1d,
|
||||
0x1d, 0x1d, 0x1d, 0x00, 0x00, 0x08, 0x08, 0x00,
|
||||
0x00, 0x08, 0x08, 0x00, 0x00, 0x08, 0x08, 0x08,
|
||||
0x10, 0x10, 0x10, 0x10, 0x08, 0x08, 0x00, 0x00,
|
||||
0x00, 0x00, 0x13, 0x11, 0x11, 0x11, 0x11, 0x11,
|
||||
0x05, 0x05, 0x18, 0x18, 0x15, 0x15, 0x10, 0x10,
|
||||
// Entry 280 - 2BF
|
||||
0x10, 0x10, 0x10, 0x10, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x13,
|
||||
0x13, 0x13, 0x13, 0x13, 0x13, 0x13, 0x13, 0x13,
|
||||
0x13, 0x13, 0x08, 0x08, 0x08, 0x04, 0x04, 0x04,
|
||||
0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x08, 0x08,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
|
||||
0x08, 0x00, 0x00, 0x00, 0x00, 0x06, 0x06, 0x06,
|
||||
0x08, 0x08, 0x08, 0x0c, 0x08, 0x00, 0x00, 0x08,
|
||||
// Entry 2C0 - 2FF
|
||||
0x08, 0x08, 0x08, 0x00, 0x00, 0x00, 0x00, 0x07,
|
||||
0x07, 0x08, 0x08, 0x1d, 0x1d, 0x04, 0x04, 0x04,
|
||||
0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x08,
|
||||
0x08, 0x08, 0x08, 0x06, 0x08, 0x08, 0x00, 0x00,
|
||||
0x08, 0x08, 0x08, 0x00, 0x00, 0x04, 0x04, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
// Entry 300 - 33F
|
||||
0x00, 0x00, 0x00, 0x01, 0x01, 0x04, 0x04,
|
||||
} // Size: 799 bytes
|
||||
|
||||
var cardinalInclusionMasks = []uint64{ // 100 elements
|
||||
// Entry 0 - 1F
|
||||
0x0000000200500419, 0x0000000000512153, 0x000000000a327105, 0x0000000ca23c7101,
|
||||
0x00000004a23c7201, 0x0000000482943001, 0x0000001482943201, 0x0000000502943001,
|
||||
0x0000000502943001, 0x0000000522943201, 0x0000000540543401, 0x00000000454128e1,
|
||||
0x000000005b02e821, 0x000000006304e821, 0x000000006304ea21, 0x0000000042842821,
|
||||
0x0000000042842a21, 0x0000000042842821, 0x0000000042842821, 0x0000000062842a21,
|
||||
0x0000000200400421, 0x0000000000400061, 0x000000000a004021, 0x0000000022004021,
|
||||
0x0000000022004221, 0x0000000002800021, 0x0000000002800221, 0x0000000002800021,
|
||||
0x0000000002800021, 0x0000000022800221, 0x0000000000400421, 0x0000000000400061,
|
||||
// Entry 20 - 3F
|
||||
0x000000000a004021, 0x0000000022004021, 0x0000000022004221, 0x0000000002800021,
|
||||
0x0000000002800221, 0x0000000002800021, 0x0000000002800021, 0x0000000022800221,
|
||||
0x0000000200400421, 0x0000000000400061, 0x000000000a004021, 0x0000000022004021,
|
||||
0x0000000022004221, 0x0000000002800021, 0x0000000002800221, 0x0000000002800021,
|
||||
0x0000000002800021, 0x0000000022800221, 0x0000000000400421, 0x0000000000400061,
|
||||
0x000000000a004021, 0x0000000022004021, 0x0000000022004221, 0x0000000002800021,
|
||||
0x0000000002800221, 0x0000000002800021, 0x0000000002800021, 0x0000000022800221,
|
||||
0x0000000200400421, 0x0000000000400061, 0x000000000a004021, 0x0000000022004021,
|
||||
// Entry 40 - 5F
|
||||
0x0000000022004221, 0x0000000002800021, 0x0000000002800221, 0x0000000002800021,
|
||||
0x0000000002800021, 0x0000000022800221, 0x0000000040400421, 0x0000000044400061,
|
||||
0x000000005a004021, 0x0000000062004021, 0x0000000062004221, 0x0000000042800021,
|
||||
0x0000000042800221, 0x0000000042800021, 0x0000000042800021, 0x0000000062800221,
|
||||
0x0000000200400421, 0x0000000000400061, 0x000000000a004021, 0x0000000022004021,
|
||||
0x0000000022004221, 0x0000000002800021, 0x0000000002800221, 0x0000000002800021,
|
||||
0x0000000002800021, 0x0000000022800221, 0x0000000040400421, 0x0000000044400061,
|
||||
0x000000005a004021, 0x0000000062004021, 0x0000000062004221, 0x0000000042800021,
|
||||
// Entry 60 - 7F
|
||||
0x0000000042800221, 0x0000000042800021, 0x0000000042800021, 0x0000000062800221,
|
||||
} // Size: 824 bytes
|
||||
|
||||
// Slots used for cardinal: A6 of 0xFF rules; 24 of 0xFF indexes; 37 of 64 sets
|
||||
|
||||
// Total table size 3860 bytes (3KiB); checksum: AAFBF21
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue