Skip to content
This repository has been archived by the owner on Nov 19, 2023. It is now read-only.

Commit

Permalink
Merge pull request #229 from snok/fix/208-test-indeterminancy
Browse files Browse the repository at this point in the history
fix/208: updated handling of anyOf
  • Loading branch information
Goldziher authored Feb 28, 2021
2 parents 43db85e + b11ea6a commit 21d7eca
Show file tree
Hide file tree
Showing 6 changed files with 49 additions and 85 deletions.
32 changes: 5 additions & 27 deletions openapi_tester/schema_tester.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
""" Schema Tester """
from functools import reduce
from itertools import chain
from typing import Any, Callable, Dict, List, Optional, Union, cast

from django.conf import settings
Expand All @@ -19,7 +19,7 @@
)
from openapi_tester.exceptions import DocumentationError, UndocumentedSchemaSectionError
from openapi_tester.loaders import DrfSpectacularSchemaLoader, DrfYasgSchemaLoader, StaticSchemaLoader
from openapi_tester.utils import combine_sub_schemas, merge_objects
from openapi_tester.utils import lazy_combinations, normalize_schema_section
from openapi_tester.validators import (
validate_enum,
validate_format,
Expand Down Expand Up @@ -152,15 +152,6 @@ def get_response_schema_section(self, response: td.Response) -> Dict[str, Any]:
)
return self.get_key_value(json_object, "schema")

def handle_all_of(self, schema_section: dict, data: Any, reference: str, **kwargs: Any) -> None:
    """
    Flatten an ``allOf`` list into its parent schema and validate ``data`` against the result.

    NOTE: pops "allOf" out of ``schema_section`` in place before merging, so the
    combined schema does not recurse into itself.
    """
    sub_schemas = schema_section.pop("allOf")
    combined_schema = {**schema_section, **combine_sub_schemas(sub_schemas)}
    self.test_schema_section(
        schema_section=combined_schema,
        data=data,
        reference=f"{reference}.allOf",
        **kwargs,
    )

def handle_one_of(self, schema_section: dict, data: Any, reference: str, **kwargs: Any):
matches = 0
for option in schema_section["oneOf"]:
Expand All @@ -174,12 +165,7 @@ def handle_one_of(self, schema_section: dict, data: Any, reference: str, **kwarg

def handle_any_of(self, schema_section: dict, data: Any, reference: str, **kwargs: Any):
any_of: List[Dict[str, Any]] = schema_section.get("anyOf", [])
combined_sub_schemas = map(
lambda index: reduce(lambda x, y: combine_sub_schemas([x, y]), any_of[index:]),
range(len(any_of)),
)

for schema in [*any_of, *combined_sub_schemas]:
for schema in chain(any_of, lazy_combinations(any_of)):
try:
self.test_schema_section(schema_section=schema, data=data, reference=f"{reference}.anyOf", **kwargs)
return
Expand Down Expand Up @@ -236,17 +222,9 @@ def test_schema_section(
f"Reference: {reference}\n\n"
f"Hint: Return a valid type, or document the value as nullable"
)

schema_section = normalize_schema_section(schema_section)
if "oneOf" in schema_section:
if schema_section["oneOf"] and all(item.get("enum") for item in schema_section["oneOf"]):
# handle the way drf-spectacular is doing enums
one_of = schema_section.pop("oneOf")
schema_section = {**schema_section, **merge_objects(one_of)}
else:
self.handle_one_of(schema_section=schema_section, data=data, reference=reference, **kwargs)
return
if "allOf" in schema_section:
self.handle_all_of(schema_section=schema_section, data=data, reference=reference, **kwargs)
self.handle_one_of(schema_section=schema_section, data=data, reference=reference, **kwargs)
return
if "anyOf" in schema_section:
self.handle_any_of(schema_section=schema_section, data=data, reference=reference, **kwargs)
Expand Down
50 changes: 28 additions & 22 deletions openapi_tester/utils.py
Original file line number Diff line number Diff line change
@@ -1,41 +1,47 @@
""" Utils Module - this file contains utility functions used in multiple places """
from typing import Any, Dict, Iterable, List
from copy import deepcopy
from itertools import chain, combinations
from typing import Any, Dict, Iterator, Sequence


def merge_objects(dictionaries: Sequence[Dict[str, Any]]) -> Dict[str, Any]:
    """
    Deep merge a sequence of dictionaries into a single dictionary.

    Merge rules, applied key by key in input order:
    - a key absent from the accumulator is copied in as-is
    - two list values are concatenated (order and duplicates preserved)
    - two dict values are merged recursively
    - any other conflict keeps the FIRST value seen

    :param dictionaries: schema fragments to merge
    :return: a single merged dictionary

    Fix: the scraped span superimposed pre- and post-commit diff lines (duplicate
    signatures, a removed ``allOf`` branch, and both the old set-union and new
    ``chain``-based list merge); this is the coherent post-commit version.
    """
    output: Dict[str, Any] = {}
    for dictionary in dictionaries:
        for key, value in dictionary.items():
            if key not in output:
                output[key] = value
                continue
            current_value = output[key]
            if isinstance(current_value, list) and isinstance(value, list):
                # concatenation (not set union) keeps ordering deterministic
                output[key] = list(chain(output[key], value))
                continue
            if isinstance(current_value, dict) and isinstance(value, dict):
                output[key] = merge_objects([current_value, value])
    return output


def combine_object_schemas(schemas: List[dict]) -> Dict[str, Any]:
properties = merge_objects([schema.get("properties", {}) for schema in schemas])
required_list = [schema.get("required", []) for schema in schemas]
required = list({key for required in required_list for key in required})
return {"type": "object", "required": required, "properties": properties}
def normalize_schema_section(schema_section: dict) -> dict:
    """
    Return a deep copy of ``schema_section`` with ``allOf`` flattened and the
    drf-spectacular all-enum ``oneOf`` idiom collapsed, applied recursively to
    every nested dict and list entry. The input is never mutated.
    """
    normalized: Dict[str, Any] = deepcopy(schema_section)
    if normalized.get("allOf"):
        all_of = normalized.pop("allOf")
        normalized = {**normalized, **merge_objects(all_of)}
    one_of = normalized.get("oneOf")
    if one_of and all(option.get("enum") for option in one_of):
        # drf-spectacular documents enums as a oneOf of enum options; merge them
        normalized = {**normalized, **merge_objects(normalized.pop("oneOf"))}
    for key in list(normalized):
        value = normalized[key]
        if isinstance(value, dict):
            normalized[key] = normalize_schema_section(value)
        elif isinstance(value, list):
            normalized[key] = [
                normalize_schema_section(entry) if isinstance(entry, dict) else entry
                for entry in value
            ]
    return normalized


def combine_sub_schemas(schemas: Iterable[Dict[str, Any]]) -> Dict[str, Any]:
array_schemas = [schema for schema in schemas if schema.get("type") == "array"]
object_schemas = [schema for schema in schemas if schema.get("type") == "object" or not schema.get("type")]
if array_schemas:
return {
"type": "array",
"items": combine_sub_schemas([schema.get("items", {}) for schema in array_schemas]),
}
if object_schemas:
return combine_object_schemas(object_schemas)
return merge_objects([schema for schema in schemas if schema.get("type") not in ["object", "array"]])
def lazy_combinations(options_list: Sequence[Dict[str, Any]]) -> Iterator[dict]:
    """
    Lazily yield the merged schema for every combination of two or more
    entries of ``options_list``, smallest combinations first.
    """
    for size in range(2, len(options_list) + 1):
        yield from (merge_objects(subset) for subset in combinations(options_list, size))
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ max-branches = 20
max-locals = 20

[tool.pylint.BASIC]
good-names = "_,e"
good-names = "_,e,i"

[tool.coverage.run]
source = ["openapi_tester/*"]
Expand Down
27 changes: 10 additions & 17 deletions tests/schema_converter.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
""" Schema to Python converter """
import base64
import random
from copy import deepcopy
from datetime import datetime
from typing import Any, Dict, List, Optional, Union

from faker import Faker

from openapi_tester.utils import combine_sub_schemas, merge_objects
from openapi_tester.utils import merge_objects, normalize_schema_section


class SchemaToPythonConverter:
Expand All @@ -20,27 +21,19 @@ class SchemaToPythonConverter:
def __init__(self, schema: dict):
    """
    Convert ``schema`` to a python value, storing it on ``self.result``.

    Fix: the scraped span superimposed the pre-commit (no copy) and post-commit
    (``deepcopy``) assignments to ``self.result``; keep only the deepcopy form,
    which protects the caller's schema from in-place mutation during conversion.
    """
    Faker.seed(0)  # deterministic fake data across runs
    self.faker = Faker()
    self.result = self.convert_schema(deepcopy(schema))

def convert_schema(self, schema: Dict[str, Any]) -> Any:
schema_type = schema.get("type", "object")
sample: List[Dict[str, Any]] = []
if "allOf" in schema:
all_of = schema.pop("allOf")
return self.convert_schema({**schema, **combine_sub_schemas(all_of)})
schema = normalize_schema_section(schema)
if "oneOf" in schema:
one_of = schema.pop("oneOf")
if all(item.get("enum") for item in one_of):
# this is meant to handle the way drf-spectacular does enums
return self.convert_schema({**schema, **merge_objects(one_of)})
while not sample:
sample = random.sample(one_of, 1)
return self.convert_schema({**schema, **sample[0]})
return self.convert_schema({**schema, **random.sample(one_of, 1)[0]})
if "anyOf" in schema:
any_of = schema.pop("anyOf")
while not sample:
sample = random.sample(any_of, random.randint(1, len(any_of)))
return self.convert_schema({**schema, **combine_sub_schemas(sample)})
return self.convert_schema(
{**schema, **merge_objects(random.sample(any_of, random.randint(1, len(any_of))))}
)
if schema_type == "array":
return self.convert_schema_array_to_list(schema)
if schema_type == "object":
Expand Down Expand Up @@ -80,9 +73,9 @@ def schema_type_to_mock_value(self, schema_object: Dict[str, Any]) -> Any:
return random.sample(enum, 1)[0]
if schema_type in ["integer", "number"] and (minimum is not None or maximum is not None):
if minimum is not None:
minimum += 1 if schema_object.get("excludeMinimum") else 0
minimum += 1 if schema_object.get("exclusiveMinimum") else 0
if maximum is not None:
maximum -= 1 if schema_object.get("excludeMaximum") else 0
maximum -= 1 if schema_object.get("exclusiveMaximum") else 0
if minimum is not None or maximum is not None:
minimum = minimum or 0
maximum = maximum or minimum * 2
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,10 +104,9 @@ components:
type: string
format: byte
price:
minimum: 0.1
maximum: 1.0
type: number
format: float
minimum: 0
maximum: 10
type: integer

Alien:
type: object
Expand Down
16 changes: 2 additions & 14 deletions tests/test_utils.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from openapi_tester.utils import combine_sub_schemas, merge_objects
from openapi_tester.utils import merge_objects
from tests.utils import sort_object

object_1 = {"type": "object", "required": ["key1"], "properties": {"key1": {"type": "string"}}}
Expand All @@ -19,26 +19,14 @@ def test_documentation_error_sort_data_type():
assert sort_object(["1", {}, []]) == ["1", {}, []]


def test_combine_sub_schemas_array_list():
    # combining two array schemas keeps the first "items" schema encountered
    schemas = [
        {"type": "array", "items": {"type": "string"}},
        {"type": "array", "items": {"type": "integer"}},
    ]
    combined = combine_sub_schemas(schemas)
    assert sort_object(combined) == sort_object({"type": "array", "items": {"type": "string"}})


def test_combine_sub_schemas_object_list():
    # two object schemas combine into the expected merged object schema
    combined = combine_sub_schemas([object_1, object_2])
    assert sort_object(combined) == sort_object({**merged_object})


def test_merge_objects():
    """
    merge_objects should deep-merge ``required`` lists and ``properties`` dicts.

    Fix: the scraped span superimposed pre- and post-commit diff lines (an input
    entry exercising the removed ``allOf`` handling and two conflicting expected
    dicts); this is the coherent post-commit version without the allOf case.
    """
    test_schemas = [
        object_1,
        object_2,
    ]
    expected = {
        "type": "object",
        "required": ["key1", "key2"],
        "properties": {"key1": {"type": "string"}, "key2": {"type": "string"}},
    }
    assert sort_object(merge_objects(test_schemas)) == sort_object(expected)

0 comments on commit 21d7eca

Please sign in to comment.