Mirror of https://github.com/yannh/kubeconform.git, synced 2026-02-20 02:17:01 +00:00
Merge pull request #75 from yannh/openapi2jsonschema-tests
Add simple openapi2jsonschema unit tests

Commit ff7da0f73b
4 changed files with 84 additions and 65 deletions
.github/workflows/main.yml (vendored)
@@ -22,9 +22,9 @@ jobs:
       - name: checkout
         uses: actions/checkout@v2
-      - name: acceptance-test
+      - name: test
         working-directory: ./scripts
-        run: make docker-acceptance
+        run: make docker-test docker-acceptance
 
   goreleaser:
     runs-on: ubuntu-latest
scripts/Makefile
@@ -2,8 +2,13 @@
 # This is really early days
 
+test: build-python-bats docker-test docker-acceptance
+
 build-python-bats:
 	docker build -t python-bats -f Dockerfile.bats .
 
+docker-test: build-python-bats
+	docker run --entrypoint "/usr/local/bin/pytest" -t python-bats openapi2jsonschema.py
+
 docker-acceptance: build-python-bats
 	docker run --entrypoint "/usr/bin/bats" -t python-bats /code/acceptance.bats
scripts/openapi2jsonschema.py
@@ -1,24 +1,26 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # Derived from https://github.com/instrumenta/openapi2jsonschema
 import yaml
 import json
 import sys
 import os
 import urllib.request
 
-def iteritems(d):
-    if hasattr(dict, "iteritems"):
-        return d.iteritems()
-    else:
-        return iter(d.items())
 
+def test_additional_properties():
+    for test in iter([{
+        "input": {"something": {"properties": {}}},
+        "expect": {'something': {'properties': {}, "additionalProperties": False}}
+    },{
+        "input": {"something": {"somethingelse": {}}},
+        "expect": {'something': {'somethingelse': {}}}
+    }]):
+        assert additional_properties(test["input"]) == test["expect"]
+
 def additional_properties(data):
     "This recreates the behaviour of kubectl at https://github.com/kubernetes/kubernetes/blob/225b9119d6a8f03fcbe3cc3d590c261965d928d0/pkg/kubectl/validation/schema.go#L312"
     new = {}
     try:
-        for k, v in iteritems(data):
+        for k, v in iter(data.items()):
             new_v = v
             if isinstance(v, dict):
                 if "properties" in v:
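As an aside (not part of the diff), the behaviour pinned down by test_additional_properties can be sketched in isolation. The helper name additional_properties_sketch below is made up for illustration; the input and expected values are taken from the test above.

# Minimal standalone sketch: any nested object that declares "properties"
# gets "additionalProperties": False added, mirroring the kubectl behaviour
# referenced in the docstring of additional_properties.
def additional_properties_sketch(data):
    if not isinstance(data, dict):
        return data
    new = {}
    for k, v in data.items():
        if isinstance(v, dict):
            if "properties" in v and "additionalProperties" not in v:
                v = dict(v, additionalProperties=False)
            v = additional_properties_sketch(v)
        new[k] = v
    return new

assert additional_properties_sketch(
    {"something": {"properties": {}}}
) == {"something": {"properties": {}, "additionalProperties": False}}
assert additional_properties_sketch(
    {"something": {"somethingelse": {}}}
) == {"something": {"somethingelse": {}}}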
@@ -32,11 +34,20 @@ def additional_properties(data):
     except AttributeError:
         return data
 
+
+def test_replace_int_or_string():
+    for test in iter([{
+        "input": {"something": {"format": "int-or-string"}},
+        "expect": {'something': {'oneOf': [{'type': 'string'}, {'type': 'integer'}]}}
+    },{
+        "input": {"something": {"format": "string"}},
+        "expect": {"something": {"format": "string"}},
+    }]):
+        assert replace_int_or_string(test["input"]) == test["expect"]
+
 def replace_int_or_string(data):
     new = {}
     try:
-        for k, v in iteritems(data):
+        for k, v in iter(data.items()):
             new_v = v
             if isinstance(v, dict):
                 if "format" in v and v["format"] == "int-or-string":
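Similarly as an aside, the int-or-string rewrite that test_replace_int_or_string asserts can be sketched standalone; replace_int_or_string_sketch is a made-up name and the data comes from the test above.

# Minimal standalone sketch: a nested object whose "format" is "int-or-string"
# is replaced by a oneOf over string and integer; other values pass through.
def replace_int_or_string_sketch(data):
    if not isinstance(data, dict):
        return data
    new = {}
    for k, v in data.items():
        if isinstance(v, dict):
            if v.get("format") == "int-or-string":
                v = {"oneOf": [{"type": "string"}, {"type": "integer"}]}
            else:
                v = replace_int_or_string_sketch(v)
        new[k] = v
    return new

assert replace_int_or_string_sketch(
    {"something": {"format": "int-or-string"}}
) == {"something": {"oneOf": [{"type": "string"}, {"type": "integer"}]}}
assert replace_int_or_string_sketch(
    {"something": {"format": "string"}}
) == {"something": {"format": "string"}}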
@@ -54,11 +65,10 @@ def replace_int_or_string(data):
     except AttributeError:
         return data
 
 
 def allow_null_optional_fields(data, parent=None, grand_parent=None, key=None):
     new = {}
     try:
-        for k, v in iteritems(data):
+        for k, v in iter(data.items()):
             new_v = v
             if isinstance(v, dict):
                 new_v = allow_null_optional_fields(v, data, parent, k)
@@ -103,60 +113,63 @@ def write_schema_file(schema, filename):
     print("JSON schema written to {filename}".format(filename=filename))

The rest of this hunk moves the script's top-level CRD-processing code under an if __name__ == "__main__": guard, so that pytest can import the file and collect the new tests without executing it. Apart from the added guard and the resulting re-indentation, the logic is unchanged; the new version reads:

if __name__ == "__main__":
    if len(sys.argv) == 0:
        print("missing file")
        exit(1)

    for crdFile in sys.argv[1:]:
        if crdFile.startswith("http"):
            f = urllib.request.urlopen(crdFile)
        else:
            f = open(crdFile)
        with f:
            defs = []
            for y in yaml.load_all(f, Loader=yaml.SafeLoader):
                if y is None:
                    continue
                if "items" in y:
                    defs.extend(y["items"])
                if "kind" not in y:
                    continue
                if y["kind"] != "CustomResourceDefinition":
                    continue
                else:
                    defs.append(y)

            for y in defs:
                filename_format = os.getenv("FILENAME_FORMAT", "{kind}_{version}")
                filename = ""
                if "spec" in y and "versions" in y["spec"] and y["spec"]["versions"]:
                    for version in y["spec"]["versions"]:
                        if "schema" in version and "openAPIV3Schema" in version["schema"]:
                            filename = filename_format.format(
                                kind=y["spec"]["names"]["kind"],
                                group=y["spec"]["group"].split(".")[0],
                                version=version["name"],
                            ).lower() + ".json"

                            schema = version["schema"]["openAPIV3Schema"]
                            write_schema_file(schema, filename)
                        elif "validation" in y["spec"] and "openAPIV3Schema" in y["spec"]["validation"]:
                            filename = filename_format.format(
                                kind=y["spec"]["names"]["kind"],
                                group=y["spec"]["group"].split(".")[0],
                                version=version["name"],
                            ).lower() + ".json"

                            schema = y["spec"]["validation"]["openAPIV3Schema"]
                            write_schema_file(schema, filename)
                elif "spec" in y and "validation" in y["spec"] and "openAPIV3Schema" in y["spec"]["validation"]:
                    filename = filename_format.format(
                        kind=y["spec"]["names"]["kind"],
                        group=y["spec"]["group"].split(".")[0],
                        version=y["spec"]["version"],
                    ).lower() + ".json"

                    schema = y["spec"]["validation"]["openAPIV3Schema"]
                    write_schema_file(schema, filename)

    exit(0)
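A quick illustration (not part of the commit) of what the guard buys: importing the module, which is what pytest does during collection, now only defines the helpers. The snippet below assumes openapi2jsonschema.py is in the current working directory.

import importlib.util

# Load the script as a module; the guarded CLI block is skipped because
# __name__ is "openapi2jsonschema", not "__main__".
spec = importlib.util.spec_from_file_location("openapi2jsonschema", "openapi2jsonschema.py")
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)

# The helpers are then importable and testable:
print(mod.replace_int_or_string({"x": {"format": "int-or-string"}}))
# {'x': {'oneOf': [{'type': 'string'}, {'type': 'integer'}]}}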
scripts/requirements.txt
@@ -1 +1,2 @@
 pyyaml
+pytest