fix(jambo): Fix allOf, anyOf, null and array type parsing #33
108
docs/source/usage.oneof.rst
Normal file
@@ -0,0 +1,108 @@
|
||||
OneOf Type
|
||||
=================
|
||||
|
||||
The OneOf type is used to specify that an object must conform to exactly one of the specified schemas. Unlike AnyOf which allows matching multiple schemas, OneOf enforces that the data matches one and only one of the provided schemas.
|
||||
|
||||
|
||||
Examples
|
||||
-----------------
|
||||
|
||||
1. **Overlapping String Example** - A field that accepts strings with overlapping constraints:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from jambo import SchemaConverter
|
||||
|
||||
schema = {
|
||||
"title": "SimpleExample",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": {
|
||||
"oneOf": [
|
||||
{"type": "string", "maxLength": 6},
|
||||
{"type": "string", "minLength": 4}
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": ["value"]
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
# Valid: Short string (matches first schema only)
|
||||
obj1 = Model(value="hi")
|
||||
print(obj1.value) # Output: hi
|
||||
|
||||
# Valid: Long string (matches second schema only)
|
||||
obj2 = Model(value="very long string")
|
||||
print(obj2.value) # Output: very long string
|
||||
|
||||
# Invalid: Medium string (matches BOTH schemas - violates oneOf)
|
||||
try:
|
||||
obj3 = Model(value="hello") # 5 chars: matches maxLength=6 AND minLength=4
|
||||
except ValueError as e:
|
||||
print("Validation fails as expected:", e)
|
||||
|
||||
|
||||
2. **Discriminator Example** - Different shapes with a type field:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from jambo import SchemaConverter
|
||||
|
||||
schema = {
|
||||
"title": "Shape",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"shape": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "circle"},
|
||||
"radius": {"type": "number", "minimum": 0}
|
||||
},
|
||||
"required": ["type", "radius"]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "rectangle"},
|
||||
"width": {"type": "number", "minimum": 0},
|
||||
"height": {"type": "number", "minimum": 0}
|
||||
},
|
||||
"required": ["type", "width", "height"]
|
||||
}
|
||||
],
|
||||
"discriminator": {
|
||||
"propertyName": "type"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["shape"]
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
# Valid: Circle
|
||||
circle = Model(shape={"type": "circle", "radius": 5.0})
|
||||
print(circle.shape.type) # Output: circle
|
||||
|
||||
# Valid: Rectangle
|
||||
rectangle = Model(shape={"type": "rectangle", "width": 10, "height": 20})
|
||||
print(rectangle.shape.type) # Output: rectangle
|
||||
|
||||
# Invalid: Wrong properties for the type
|
||||
try:
|
||||
invalid = Model(shape={"type": "circle", "width": 10})
|
||||
except ValueError as e:
|
||||
print("Validation fails as expected:", e)
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
OneOf ensures exactly one schema matches. The discriminator helps Pydantic efficiently determine which schema to use based on a specific property value.
|
||||
|
||||
.. warning::
|
||||
|
||||
If your data could match multiple schemas in a oneOf, validation will fail. Ensure schemas are mutually exclusive.
|
||||
@@ -45,5 +45,6 @@ For more complex schemas and types see our documentation on
|
||||
usage.reference
|
||||
usage.allof
|
||||
usage.anyof
|
||||
usage.oneof
|
||||
usage.enum
|
||||
usage.const
|
||||
@@ -7,7 +7,9 @@ from .const_type_parser import ConstTypeParser
|
||||
from .enum_type_parser import EnumTypeParser
|
||||
from .float_type_parser import FloatTypeParser
|
||||
from .int_type_parser import IntTypeParser
|
||||
from .null_type_parser import NullTypeParser
|
||||
from .object_type_parser import ObjectTypeParser
|
||||
from .oneof_type_parser import OneOfTypeParser
|
||||
from .ref_type_parser import RefTypeParser
|
||||
from .string_type_parser import StringTypeParser
|
||||
|
||||
@@ -22,7 +24,9 @@ __all__ = [
|
||||
"BooleanTypeParser",
|
||||
"FloatTypeParser",
|
||||
"IntTypeParser",
|
||||
"NullTypeParser",
|
||||
"ObjectTypeParser",
|
||||
"OneOfTypeParser",
|
||||
"StringTypeParser",
|
||||
"RefTypeParser",
|
||||
]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from pydantic import Field, TypeAdapter
|
||||
from typing_extensions import Annotated, Any, Generic, Self, TypeVar, Unpack
|
||||
from typing import Annotated, Any, Generic, Self, TypeVar, Unpack
|
||||
|
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
@@ -47,7 +47,7 @@ class GenericTypeParser(ABC, Generic[T]):
|
||||
|
||||
if not self._validate_default(parsed_type, parsed_properties):
|
||||
raise ValueError(
|
||||
f"Default value {properties.get('default')} is not valid for type {parsed_type.__name__}"
|
||||
f"Default value {properties.get('default')} is not valid for type {parsed_type}"
|
||||
)
|
||||
|
||||
return parsed_type, parsed_properties
|
||||
@@ -124,3 +124,12 @@ class GenericTypeParser(ABC, Generic[T]):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _has_meaningful_constraints(field_props):
|
||||
if not field_props:
|
||||
return False
|
||||
|
||||
if field_props == {"default": None}:
|
||||
return False
|
||||
return True
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from typing_extensions import Any, Unpack
|
||||
from typing import Any, Unpack
|
||||
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
class AllOfTypeParser(GenericTypeParser):
|
||||
@@ -74,10 +74,10 @@ class AllOfTypeParser(GenericTypeParser):
|
||||
return old_value + new_value
|
||||
|
||||
if prop_name in ("maxLength", "maximum", "exclusiveMaximum"):
|
||||
return old_value if old_value > new_value else new_value
|
||||
return old_value if old_value < new_value else new_value
|
||||
|
||||
if prop_name in ("minLength", "minimum", "exclusiveMinimum"):
|
||||
return old_value if old_value < new_value else new_value
|
||||
return old_value if old_value > new_value else new_value
|
||||
|
||||
if prop_name == "properties":
|
||||
for key, value in new_value.items():
|
||||
|
||||
@@ -2,11 +2,14 @@ from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from pydantic import Field
|
||||
from typing_extensions import Annotated, Union, Unpack
|
||||
from typing import Annotated, Unpack
|
||||
from types import UnionType
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
from functools import reduce
|
||||
from operator import or_
|
||||
|
||||
|
||||
class AnyOfTypeParser(GenericTypeParser):
|
||||
mapped_type = Union
|
||||
mapped_type = UnionType
|
||||
|
||||
json_schema_type = "anyOf"
|
||||
|
||||
@@ -34,8 +37,13 @@ class AnyOfTypeParser(GenericTypeParser):
|
||||
# By defining the type as Union of Annotated type we can use the Field validator
|
||||
# to enforce the constraints of each union type when needed.
|
||||
# We use Annotated to attach the Field validators to the type.
|
||||
# Only wrap in Annotated[T, Field(**v)] if there are meaningful field constraints
|
||||
# Don't wrap for simple cases where v only contains {'default': None}
|
||||
field_types = [
|
||||
Annotated[t, Field(**v)] if v is not None else t for t, v in sub_types
|
||||
Annotated[t, Field(**v)] if self._has_meaningful_constraints(v) else t
|
||||
for t, v in sub_types
|
||||
]
|
||||
|
||||
return Union[(*field_types,)], mapped_properties
|
||||
union_type = reduce(or_, field_types)
|
||||
|
||||
return union_type, mapped_properties
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from typing_extensions import Iterable, TypeVar, Unpack
|
||||
from typing import Iterable, TypeVar, Unpack
|
||||
|
||||
import copy
|
||||
|
||||
@@ -35,10 +35,15 @@ class ArrayTypeParser(GenericTypeParser):
|
||||
|
||||
mapped_properties = self.mappings_properties_builder(properties, **kwargs)
|
||||
|
||||
if "default" not in mapped_properties:
|
||||
if not kwargs.get("required", False) and "default" not in mapped_properties:
|
||||
mapped_properties["default_factory"] = self._build_default_factory(
|
||||
properties.get("default"), wrapper_type
|
||||
)
|
||||
elif "default" in properties:
|
||||
mapped_properties["default_factory"] = self._build_default_factory(
|
||||
properties["default"], wrapper_type
|
||||
)
|
||||
mapped_properties.pop("default", None)
|
||||
|
||||
return field_type, mapped_properties
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from typing_extensions import Unpack
|
||||
from typing import Unpack
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
|
||||
class BooleanTypeParser(GenericTypeParser):
|
||||
|
||||
@@ -3,7 +3,7 @@ from jambo.types.json_schema_type import JSONSchemaNativeTypes
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from pydantic import AfterValidator
|
||||
from typing_extensions import Annotated, Any, Unpack
|
||||
from typing import Annotated, Any, Literal, Unpack
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
|
||||
class ConstTypeParser(GenericTypeParser):
|
||||
@@ -33,11 +33,15 @@ class ConstTypeParser(GenericTypeParser):
|
||||
return const_type, parsed_properties
|
||||
|
||||
def _build_const_type(self, const_value):
|
||||
def _validate_const_value(value: Any) -> Any:
|
||||
if value != const_value:
|
||||
raise ValueError(
|
||||
f"Value must be equal to the constant value: {const_value}"
|
||||
)
|
||||
return value
|
||||
try:
|
||||
hash(const_value)
|
||||
return Literal[const_value]
|
||||
except TypeError:
|
||||
def _validate_const_value(value: Any) -> Any:
|
||||
if value != const_value:
|
||||
raise ValueError(
|
||||
f"Value must be equal to the constant value: {const_value}"
|
||||
)
|
||||
return value
|
||||
|
This is a good idea and implementation, but would have to be merged in a separate PR. This is a good idea and implementation, but would have to be merged in a separate PR.
|
||||
|
||||
return Annotated[type(const_value), AfterValidator(_validate_const_value)]
|
||||
return Annotated[type(const_value), AfterValidator(_validate_const_value)]
|
||||
|
||||
@@ -2,7 +2,7 @@ from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.json_schema_type import JSONSchemaNativeTypes
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from typing_extensions import Unpack
|
||||
from typing import Unpack
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
from enum import Enum
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from typing_extensions import Unpack
|
||||
from typing import Unpack
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
|
||||
class FloatTypeParser(GenericTypeParser):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from typing_extensions import Unpack
|
||||
from typing import Unpack
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
|
||||
class IntTypeParser(GenericTypeParser):
|
||||
|
||||
25
jambo/parser/null_type_parser.py
Normal file
@@ -0,0 +1,25 @@
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
from jambo.parser._type_parser import GenericTypeParser
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
from typing import Unpack
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
class NullTypeParser(GenericTypeParser):
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
mapped_type = None
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
json_schema_type = "type:null"
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
type_mappings = {
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
"default": "default",
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
}
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
def from_properties_impl(
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
self, name, properties, **kwargs: Unpack[TypeParserOptions]
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
):
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
mapped_properties = self.mappings_properties_builder(properties, **kwargs)
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
default_value = properties.get("default")
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
if default_value is not None:
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
raise ValueError(f"Default value for {name} must be None.")
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
return None, mapped_properties
|
||||
|
Already implemented in a previous PR that was merged, sorry Already implemented in a previous PR that was merged, sorry
|
||||
@@ -2,7 +2,7 @@ from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, create_model
|
||||
from typing_extensions import Any, Unpack
|
||||
from typing import Any, Unpack
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
|
||||
class ObjectTypeParser(GenericTypeParser):
|
||||
|
||||
72
jambo/parser/oneof_type_parser.py
Normal file
@@ -0,0 +1,72 @@
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
from types import UnionType
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
from jambo.parser._type_parser import GenericTypeParser
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
from pydantic import Field, BeforeValidator, TypeAdapter, ValidationError
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
from typing import Annotated, Unpack, Any
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
from functools import reduce
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
from operator import or_
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
class OneOfTypeParser(GenericTypeParser):
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
mapped_type = UnionType
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
json_schema_type = "oneOf"
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
def from_properties_impl(
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
self, name, properties, **kwargs: Unpack[TypeParserOptions]
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
):
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
if "oneOf" not in properties:
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
raise ValueError(f"Invalid JSON Schema: {properties}")
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
if not isinstance(properties["oneOf"], list):
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
raise ValueError(f"Invalid JSON Schema: {properties['oneOf']}")
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
mapped_properties = self.mappings_properties_builder(properties, **kwargs)
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
sub_properties = properties["oneOf"]
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
sub_types = [
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
GenericTypeParser.type_from_properties(name, subProperty, **kwargs)
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
for subProperty in sub_properties
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
]
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
if not kwargs.get("required", False):
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
mapped_properties["default"] = mapped_properties.get("default")
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
field_types = [
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
Annotated[t, Field(**v)] if self._has_meaningful_constraints(v) else t
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
for t, v in sub_types
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
]
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
union_type = reduce(or_, field_types)
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
discriminator = properties.get("discriminator")
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
if discriminator and isinstance(discriminator, dict):
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
property_name = discriminator.get("propertyName")
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
if property_name:
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
validated_type = Annotated[union_type, Field(discriminator=property_name)]
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
return validated_type, mapped_properties
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
def validate_one_of(value: Any) -> Any:
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
matched_count = 0
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
validation_errors = []
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
for field_type in field_types:
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
try:
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
adapter = TypeAdapter(field_type)
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
adapter.validate_python(value)
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
matched_count += 1
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
except ValidationError as e:
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
validation_errors.append(str(e))
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
continue
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
if matched_count == 0:
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
raise ValueError(f"Value does not match any of the oneOf schemas")
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
elif matched_count > 1:
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
raise ValueError(f"Value matches multiple oneOf schemas, exactly one expected")
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
return value
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
validated_type = Annotated[union_type, BeforeValidator(validate_one_of)]
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
return validated_type, mapped_properties
|
||||
|
This entire feature should be in a separate PR This entire feature should be in a separate PR
|
||||
@@ -1,10 +1,10 @@
|
||||
from jambo.parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from typing_extensions import Any, ForwardRef, Literal, TypeVar, Union, Unpack
|
||||
from typing import Any, ForwardRef, Literal, TypeVar, Unpack
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
|
||||
RefType = TypeVar("RefType", bound=Union[type, ForwardRef])
|
||||
RefType = TypeVar("RefType", bound=type | ForwardRef)
|
||||
|
||||
RefStrategy = Literal["forward_ref", "def_ref"]
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
from jambo.parser._type_parser import GenericTypeParser
|
||||
from jambo.types.type_parser_options import TypeParserOptions
|
||||
|
||||
from pydantic import EmailStr, HttpUrl, IPvAnyAddress
|
||||
from typing_extensions import Unpack
|
||||
from pydantic import EmailStr, HttpUrl, IPvAnyAddress, FilePath
|
||||
from typing import Unpack
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
from datetime import date, datetime, time
|
||||
|
||||
@@ -16,7 +16,6 @@ class StringTypeParser(GenericTypeParser):
|
||||
"maxLength": "max_length",
|
||||
"minLength": "min_length",
|
||||
"pattern": "pattern",
|
||||
"format": "format",
|
||||
}
|
||||
|
||||
format_type_mapping = {
|
||||
@@ -28,6 +27,8 @@ class StringTypeParser(GenericTypeParser):
|
||||
"date": date,
|
||||
"time": time,
|
||||
"date-time": datetime,
|
||||
"binary": bytes,
|
||||
"file-path": FilePath,
|
||||
}
|
||||
|
||||
format_pattern_mapping = {
|
||||
@@ -52,4 +53,8 @@ class StringTypeParser(GenericTypeParser):
|
||||
if format_type in self.format_pattern_mapping:
|
||||
mapped_properties["pattern"] = self.format_pattern_mapping[format_type]
|
||||
|
||||
if "json_schema_extra" not in mapped_properties:
|
||||
mapped_properties["json_schema_extra"] = {}
|
||||
mapped_properties["json_schema_extra"]["format"] = format_type
|
||||
|
||||
return mapped_type, mapped_properties
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
from typing_extensions import Dict, List, Literal, TypedDict, Union
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
from __future__ import annotations
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
from typing import Literal, TypedDict
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
|
||||
from types import NoneType
|
||||
|
||||
@@ -19,7 +21,7 @@ JSONSchemaNativeTypes: tuple[type, ...] = (
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
)
|
||||
|
||||
|
||||
JSONType = Union[str, int, float, bool, None, Dict[str, "JSONType"], List["JSONType"]]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONType = str | int | float | bool | None | dict[str, "JSONType"] | list["JSONType"]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
|
||||
|
||||
class JSONSchema(TypedDict, total=False):
|
||||
@@ -27,23 +29,23 @@ class JSONSchema(TypedDict, total=False):
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
title: str
|
||||
description: str
|
||||
default: JSONType
|
||||
examples: List[JSONType]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
examples: list[JSONType]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
|
||||
# Type definitions
|
||||
type: Union[JSONSchemaType, List[JSONSchemaType]]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
type: JSONSchemaType | list[JSONSchemaType]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
|
||||
# Object-specific keywords
|
||||
properties: Dict[str, "JSONSchema"]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
required: List[str]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
additionalProperties: Union[bool, "JSONSchema"]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
properties: dict[str, JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
required: list[str]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
additionalProperties: bool | JSONSchema
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
minProperties: int
|
||||
maxProperties: int
|
||||
patternProperties: Dict[str, "JSONSchema"]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
dependencies: Dict[str, Union[List[str], "JSONSchema"]]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
patternProperties: dict[str, JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
dependencies: dict[str, list[str] | JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
|
||||
# Array-specific keywords
|
||||
items: Union["JSONSchema", List["JSONSchema"]]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
additionalItems: Union[bool, "JSONSchema"]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
items: JSONSchema | list[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
additionalItems: bool | JSONSchema
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
minItems: int
|
||||
maxItems: int
|
||||
uniqueItems: bool
|
||||
@@ -62,32 +64,32 @@ class JSONSchema(TypedDict, total=False):
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
multipleOf: float
|
||||
|
||||
# Enum and const
|
||||
enum: List[JSONType]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
enum: list[JSONType]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
const: JSONType
|
||||
|
||||
# Conditionals
|
||||
if_: "JSONSchema" # 'if' is a reserved word in Python
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
then: "JSONSchema"
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
else_: "JSONSchema" # 'else' is also a reserved word
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
if_: JSONSchema # 'if' is a reserved word in Python
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
then: JSONSchema
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
else_: JSONSchema # 'else' is also a reserved word
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
|
||||
# Combination keywords
|
||||
allOf: List["JSONSchema"]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
anyOf: List["JSONSchema"]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
oneOf: List["JSONSchema"]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
not_: "JSONSchema" # 'not' is a reserved word
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
allOf: list[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
anyOf: list[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
oneOf: list[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
not_: JSONSchema # 'not' is a reserved word
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
|
||||
|
||||
# Fix forward references
|
||||
JSONSchema.__annotations__["properties"] = Dict[str, JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["items"] = Union[JSONSchema, List[JSONSchema]]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["additionalItems"] = Union[bool, JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["additionalProperties"] = Union[bool, JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["patternProperties"] = Dict[str, JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["dependencies"] = Dict[str, Union[List[str], JSONSchema]]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["properties"] = dict[str, JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["items"] = JSONSchema | list[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["additionalItems"] = bool | JSONSchema
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["additionalProperties"] = bool | JSONSchema
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["patternProperties"] = dict[str, JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["dependencies"] = dict[str, list[str] | JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["if_"] = JSONSchema
|
||||
JSONSchema.__annotations__["then"] = JSONSchema
|
||||
JSONSchema.__annotations__["else_"] = JSONSchema
|
||||
JSONSchema.__annotations__["allOf"] = List[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["anyOf"] = List[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["oneOf"] = List[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["allOf"] = list[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["anyOf"] = list[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["oneOf"] = list[JSONSchema]
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
JSONSchema.__annotations__["not_"] = JSONSchema
|
||||
|
||||
|
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
A large change with little impact, is there a necessity for this change? A large change with little impact, is there a necessity for this change?
|
||||
@@ -1,6 +1,6 @@
|
||||
from jambo.types.json_schema_type import JSONSchema
|
||||
|
||||
from typing_extensions import TypedDict
|
||||
from typing import TypedDict
|
||||
|
`typing_extensions` is prefered for Python3.10 compatibility
|
||||
|
||||
|
||||
class TypeParserOptions(TypedDict):
|
||||
|
||||
@@ -117,8 +117,8 @@ class TestAllOfTypeParser(TestCase):
|
||||
)
|
||||
|
||||
self.assertEqual(type_parsing, str)
|
||||
self.assertEqual(type_validator["max_length"], 11)
|
||||
self.assertEqual(type_validator["min_length"], 1)
|
||||
self.assertEqual(type_validator["max_length"], 4)
|
||||
self.assertEqual(type_validator["min_length"], 2)
|
||||
|
||||
def test_all_of_type_parser_in_fields(self):
|
||||
"""
|
||||
@@ -138,8 +138,8 @@ class TestAllOfTypeParser(TestCase):
|
||||
)
|
||||
|
||||
self.assertEqual(type_parsing, str)
|
||||
self.assertEqual(type_validator["max_length"], 11)
|
||||
self.assertEqual(type_validator["min_length"], 1)
|
||||
self.assertEqual(type_validator["max_length"], 4)
|
||||
self.assertEqual(type_validator["min_length"], 2)
|
||||
|
||||
def test_invalid_all_of(self):
|
||||
"""
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from jambo.parser.anyof_type_parser import AnyOfTypeParser
|
||||
|
||||
from typing_extensions import Annotated, Union, get_args, get_origin
|
||||
from typing import Annotated, get_args, get_origin
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
@@ -42,15 +42,12 @@ class TestAnyOfTypeParser(TestCase):
|
||||
)
|
||||
|
||||
# check union type has string and int
|
||||
self.assertEqual(get_origin(type_parsing), Union)
|
||||
self.assertEqual(get_origin(type_parsing), type(str | int))
|
||||
|
||||
type_1, type_2 = get_args(type_parsing)
|
||||
|
||||
self.assertEqual(get_origin(type_1), Annotated)
|
||||
self.assertIn(str, get_args(type_1))
|
||||
|
||||
self.assertEqual(get_origin(type_2), Annotated)
|
||||
self.assertIn(int, get_args(type_2))
|
||||
self.assertEqual(type_1, str)
|
||||
self.assertEqual(type_2, int)
|
||||
|
||||
def test_any_of_string_or_int_with_default(self):
|
||||
"""
|
||||
@@ -70,15 +67,12 @@ class TestAnyOfTypeParser(TestCase):
|
||||
)
|
||||
|
||||
# check union type has string and int
|
||||
self.assertEqual(get_origin(type_parsing), Union)
|
||||
self.assertEqual(get_origin(type_parsing), type(str | int))
|
||||
|
||||
type_1, type_2 = get_args(type_parsing)
|
||||
|
||||
self.assertEqual(get_origin(type_1), Annotated)
|
||||
self.assertIn(str, get_args(type_1))
|
||||
|
||||
self.assertEqual(get_origin(type_2), Annotated)
|
||||
self.assertIn(int, get_args(type_2))
|
||||
self.assertEqual(type_1, str)
|
||||
self.assertEqual(type_2, int)
|
||||
|
||||
self.assertEqual(type_validator["default"], 42)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from jambo.parser import ArrayTypeParser
|
||||
|
||||
from typing_extensions import get_args
|
||||
from typing import get_args
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
@@ -97,3 +97,15 @@ class TestArrayTypeParser(TestCase):
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
parser.from_properties("placeholder", properties)
|
||||
|
||||
def test_array_parser_required_without_default(self):
|
||||
parser = ArrayTypeParser()
|
||||
|
||||
properties = {"items": {"type": "string"}}
|
||||
|
||||
type_parsing, type_validator = parser.from_properties(
|
||||
"test_array", properties, required=True
|
||||
)
|
||||
|
||||
self.assertNotIn("default_factory", type_validator)
|
||||
self.assertNotIn("default", type_validator)
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
from jambo.parser import ConstTypeParser
|
||||
|
||||
from typing_extensions import Annotated, get_args, get_origin
|
||||
from typing import Annotated, Literal, get_args, get_origin
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
|
||||
class TestConstTypeParser(TestCase):
|
||||
def test_const_type_parser(self):
|
||||
def test_const_type_parser_hashable_value(self):
|
||||
parser = ConstTypeParser()
|
||||
|
||||
expected_const_value = "United States of America"
|
||||
@@ -16,8 +16,53 @@ class TestConstTypeParser(TestCase):
|
||||
"country", properties
|
||||
)
|
||||
|
||||
self.assertEqual(get_origin(parsed_type), Literal)
|
||||
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
||||
|
||||
self.assertEqual(parsed_properties["default"], expected_const_value)
|
||||
|
||||
def test_const_type_parser_non_hashable_value(self):
|
||||
parser = ConstTypeParser()
|
||||
|
||||
expected_const_value = [1, 2, 3]
|
||||
properties = {"const": expected_const_value}
|
||||
|
||||
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||
"list_const", properties
|
||||
)
|
||||
|
||||
self.assertEqual(get_origin(parsed_type), Annotated)
|
||||
self.assertIn(str, get_args(parsed_type))
|
||||
self.assertIn(list, get_args(parsed_type))
|
||||
|
||||
self.assertEqual(parsed_properties["default"], expected_const_value)
|
||||
|
||||
def test_const_type_parser_integer_value(self):
|
||||
parser = ConstTypeParser()
|
||||
|
||||
expected_const_value = 42
|
||||
properties = {"const": expected_const_value}
|
||||
|
||||
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||
"int_const", properties
|
||||
)
|
||||
|
||||
self.assertEqual(get_origin(parsed_type), Literal)
|
||||
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
||||
|
||||
self.assertEqual(parsed_properties["default"], expected_const_value)
|
||||
|
||||
def test_const_type_parser_boolean_value(self):
|
||||
parser = ConstTypeParser()
|
||||
|
||||
expected_const_value = True
|
||||
properties = {"const": expected_const_value}
|
||||
|
||||
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||
"bool_const", properties
|
||||
)
|
||||
|
||||
self.assertEqual(get_origin(parsed_type), Literal)
|
||||
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
||||
|
||||
self.assertEqual(parsed_properties["default"], expected_const_value)
|
||||
|
||||
|
||||
43
tests/parser/test_null_type_parser.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from jambo.parser import NullTypeParser
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
|
||||
class TestNullTypeParser(TestCase):
|
||||
def test_null_parser_no_options(self):
|
||||
parser = NullTypeParser()
|
||||
|
||||
properties = {"type": "null"}
|
||||
|
||||
type_parsing, type_validator = parser.from_properties_impl(
|
||||
"placeholder", properties
|
||||
)
|
||||
|
||||
self.assertEqual(type_parsing, None)
|
||||
self.assertEqual(type_validator, {"default": None})
|
||||
|
||||
def test_null_parser_with_default(self):
|
||||
parser = NullTypeParser()
|
||||
|
||||
properties = {
|
||||
"type": "null",
|
||||
"default": None,
|
||||
}
|
||||
|
||||
type_parsing, type_validator = parser.from_properties_impl(
|
||||
"placeholder", properties
|
||||
)
|
||||
|
||||
self.assertEqual(type_parsing, None)
|
||||
self.assertEqual(type_validator["default"], None)
|
||||
|
||||
def test_null_parser_with_invalid_default(self):
|
||||
parser = NullTypeParser()
|
||||
|
||||
properties = {
|
||||
"type": "null",
|
||||
"default": "invalid",
|
||||
}
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
parser.from_properties_impl("placeholder", properties)
|
||||
477
tests/parser/test_oneof_type_parser.py
Normal file
@@ -0,0 +1,477 @@
|
||||
from jambo import SchemaConverter
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
|
||||
class TestOneOfTypeParser(TestCase):
|
||||
def test_oneof_basic_integer_and_string(self):
|
||||
schema = {
|
||||
"title": "Person",
|
||||
"description": "A person with an ID that can be either an integer or a formatted string",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"oneOf": [
|
||||
{"type": "integer", "minimum": 1},
|
||||
{"type": "string", "pattern": "^[A-Z]{2}[0-9]{4}$"},
|
||||
]
|
||||
},
|
||||
},
|
||||
"required": ["id"],
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
obj1 = Model(id=123)
|
||||
self.assertEqual(obj1.id, 123)
|
||||
|
||||
obj2 = Model(id="AB1234")
|
||||
self.assertEqual(obj2.id, "AB1234")
|
||||
|
||||
def test_oneof_validation_failures(self):
|
||||
schema = {
|
||||
"title": "Person",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"oneOf": [
|
||||
{"type": "integer", "minimum": 1},
|
||||
{"type": "string", "pattern": "^[A-Z]{2}[0-9]{4}$"},
|
||||
]
|
||||
},
|
||||
},
|
||||
"required": ["id"],
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(id=-5)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(id="invalid")
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(id=123.45)
|
||||
|
||||
def test_oneof_with_conflicting_schemas(self):
|
||||
schema = {
|
||||
"title": "Value",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"data": {
|
||||
"oneOf": [
|
||||
{"type": "number", "multipleOf": 2},
|
||||
{"type": "number", "multipleOf": 3},
|
||||
]
|
||||
},
|
||||
},
|
||||
"required": ["data"],
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
obj1 = Model(data=4)
|
||||
self.assertEqual(obj1.data, 4)
|
||||
|
||||
obj2 = Model(data=9)
|
||||
self.assertEqual(obj2.data, 9)
|
||||
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
Model(data=6)
|
||||
self.assertIn("matches multiple oneOf schemas", str(cm.exception))
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(data=5)
|
||||
|
||||
def test_oneof_with_objects(self):
|
||||
schema = {
|
||||
"title": "Contact",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"contact_info": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"email": {"type": "string", "format": "email"}
|
||||
},
|
||||
"required": ["email"],
|
||||
"additionalProperties": False
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"phone": {"type": "string", "pattern": "^[0-9-]+$"}
|
||||
},
|
||||
"required": ["phone"],
|
||||
"additionalProperties": False
|
||||
}
|
||||
]
|
||||
},
|
||||
},
|
||||
"required": ["contact_info"],
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
obj1 = Model(contact_info={"email": "user@example.com"})
|
||||
self.assertEqual(obj1.contact_info.email, "user@example.com")
|
||||
|
||||
obj2 = Model(contact_info={"phone": "123-456-7890"})
|
||||
self.assertEqual(obj2.contact_info.phone, "123-456-7890")
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(contact_info={"email": "user@example.com", "phone": "123-456-7890"})
|
||||
|
||||
def test_oneof_with_discriminator_basic(self):
|
||||
schema = {
|
||||
"title": "Pet",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"pet": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "cat"},
|
||||
"meows": {"type": "boolean"}
|
||||
},
|
||||
"required": ["type", "meows"]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "dog"},
|
||||
"barks": {"type": "boolean"}
|
||||
},
|
||||
"required": ["type", "barks"]
|
||||
}
|
||||
],
|
||||
"discriminator": {
|
||||
"propertyName": "type"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["pet"]
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
cat = Model(pet={"type": "cat", "meows": True})
|
||||
self.assertEqual(cat.pet.type, "cat")
|
||||
self.assertEqual(cat.pet.meows, True)
|
||||
|
||||
dog = Model(pet={"type": "dog", "barks": False})
|
||||
self.assertEqual(dog.pet.type, "dog")
|
||||
self.assertEqual(dog.pet.barks, False)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(pet={"type": "cat", "barks": True})
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(pet={"type": "bird", "flies": True})
|
||||
|
||||
def test_oneof_with_discriminator_mapping(self):
|
||||
schema = {
|
||||
"title": "Vehicle",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"vehicle": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"vehicle_type": {"const": "car"},
|
||||
"doors": {"type": "integer", "minimum": 2, "maximum": 4}
|
||||
},
|
||||
"required": ["vehicle_type", "doors"]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"vehicle_type": {"const": "motorcycle"},
|
||||
"engine_size": {"type": "number", "minimum": 125}
|
||||
},
|
||||
"required": ["vehicle_type", "engine_size"]
|
||||
}
|
||||
],
|
||||
"discriminator": {
|
||||
"propertyName": "vehicle_type",
|
||||
"mapping": {
|
||||
"car": "#/properties/vehicle/oneOf/0",
|
||||
"motorcycle": "#/properties/vehicle/oneOf/1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["vehicle"]
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
car = Model(vehicle={"vehicle_type": "car", "doors": 4})
|
||||
self.assertEqual(car.vehicle.vehicle_type, "car")
|
||||
self.assertEqual(car.vehicle.doors, 4)
|
||||
|
||||
motorcycle = Model(vehicle={"vehicle_type": "motorcycle", "engine_size": 600.0})
|
||||
self.assertEqual(motorcycle.vehicle.vehicle_type, "motorcycle")
|
||||
self.assertEqual(motorcycle.vehicle.engine_size, 600.0)
|
||||
|
||||
def test_oneof_with_discriminator_invalid_values(self):
|
||||
schema = {
|
||||
"title": "Shape",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"shape": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "circle"},
|
||||
"radius": {"type": "number", "minimum": 0}
|
||||
},
|
||||
"required": ["type", "radius"]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "square"},
|
||||
"side": {"type": "number", "minimum": 0}
|
||||
},
|
||||
"required": ["type", "side"]
|
||||
}
|
||||
],
|
||||
"discriminator": {
|
||||
"propertyName": "type"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["shape"]
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(shape={"type": "triangle", "base": 5, "height": 3})
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(shape={"type": "circle", "side": 5})
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(shape={"radius": 5})
|
||||
|
||||
def test_oneof_missing_properties(self):
|
||||
schema = {
|
||||
"title": "Test",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": {
|
||||
"notOneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "integer"},
|
||||
]
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
SchemaConverter.build(schema)
|
||||
|
||||
def test_oneof_invalid_properties(self):
|
||||
schema = {
|
||||
"title": "Test",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": {
|
||||
"oneOf": None
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
SchemaConverter.build(schema)
|
||||
|
||||
def test_oneof_with_default_value(self):
|
||||
schema = {
|
||||
"title": "Test",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "integer"},
|
||||
],
|
||||
"default": "test"
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
obj = Model()
|
||||
self.assertEqual(obj.value, "test")
|
||||
|
||||
def test_oneof_with_invalid_default_value(self):
|
||||
schema = {
|
||||
"title": "Test",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": {
|
||||
"oneOf": [
|
||||
{"type": "string", "minLength": 5},
|
||||
{"type": "integer", "minimum": 10},
|
||||
],
|
||||
"default": "hi"
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
SchemaConverter.build(schema)
|
||||
|
||||
def test_oneof_discriminator_without_property_name(self):
|
||||
schema = {
|
||||
"title": "Test",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "a"},
|
||||
"value": {"type": "string"}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "b"},
|
||||
"value": {"type": "integer"}
|
||||
}
|
||||
}
|
||||
],
|
||||
"discriminator": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
obj = Model(value={"type": "a", "value": "test", "extra": "invalid"})
|
||||
self.assertEqual(obj.value.type, "a")
|
||||
self.assertEqual(obj.value.value, "test")
|
||||
|
||||
obj2 = Model(value={"type": "b", "value": 42})
|
||||
self.assertEqual(obj2.value.type, "b")
|
||||
self.assertEqual(obj2.value.value, 42)
|
||||
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
Model(value={"type": "c", "value": "test"})
|
||||
self.assertIn("does not match any of the oneOf schemas", str(cm.exception))
|
||||
|
||||
def test_oneof_multiple_matches_without_discriminator(self):
|
||||
schema = {
|
||||
"title": "Test",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"data": {"type": "string"}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"data": {"type": "string"},
|
||||
"optional": {"type": "string"}
|
||||
}
|
||||
}
|
||||
],
|
||||
"discriminator": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
Model(value={"data": "test"})
|
||||
self.assertIn("matches multiple oneOf schemas", str(cm.exception))
|
||||
|
||||
def test_oneof_overlapping_strings_from_docs(self):
|
||||
schema = {
|
||||
"title": "SimpleExample",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"value": {
|
||||
"oneOf": [
|
||||
{"type": "string", "maxLength": 6},
|
||||
{"type": "string", "minLength": 4}
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": ["value"]
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
obj1 = Model(value="hi")
|
||||
self.assertEqual(obj1.value, "hi")
|
||||
|
||||
obj2 = Model(value="very long string")
|
||||
self.assertEqual(obj2.value, "very long string")
|
||||
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
Model(value="hello") # 5 chars: matches maxLength=6 AND minLength=4
|
||||
self.assertIn("matches multiple oneOf schemas", str(cm.exception))
|
||||
|
||||
def test_oneof_shapes_discriminator_from_docs(self):
|
||||
schema = {
|
||||
"title": "Shape",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"shape": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "circle"},
|
||||
"radius": {"type": "number", "minimum": 0}
|
||||
},
|
||||
"required": ["type", "radius"]
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"type": {"const": "rectangle"},
|
||||
"width": {"type": "number", "minimum": 0},
|
||||
"height": {"type": "number", "minimum": 0}
|
||||
},
|
||||
"required": ["type", "width", "height"]
|
||||
}
|
||||
],
|
||||
"discriminator": {
|
||||
"propertyName": "type"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["shape"]
|
||||
}
|
||||
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
circle = Model(shape={"type": "circle", "radius": 5.0})
|
||||
self.assertEqual(circle.shape.type, "circle")
|
||||
self.assertEqual(circle.shape.radius, 5.0)
|
||||
|
||||
rectangle = Model(shape={"type": "rectangle", "width": 10, "height": 20})
|
||||
self.assertEqual(rectangle.shape.type, "rectangle")
|
||||
self.assertEqual(rectangle.shape.width, 10)
|
||||
self.assertEqual(rectangle.shape.height, 20)
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(shape={"type": "circle", "width": 10})
|
||||
@@ -1,6 +1,6 @@
|
||||
from jambo.parser import StringTypeParser
|
||||
|
||||
from pydantic import EmailStr, HttpUrl, IPvAnyAddress
|
||||
from pydantic import EmailStr, HttpUrl, IPvAnyAddress, FilePath
|
||||
|
||||
from datetime import date, datetime, time
|
||||
from unittest import TestCase
|
||||
@@ -159,6 +159,18 @@ class TestStringTypeParser(TestCase):
|
||||
type_validator["pattern"], parser.format_pattern_mapping[format_type]
|
||||
)
|
||||
|
||||
def test_string_parser_with_file_path_format(self):
    """A string schema with ``format: file-path`` parses to pydantic's ``FilePath``."""
    parser = StringTypeParser()

    file_path_properties = {
        "type": "string",
        "format": "file-path",
    }

    parsed_type, _validator = parser.from_properties(
        "placeholder", file_path_properties
    )

    self.assertEqual(parsed_type, FilePath)
|
||||
|
||||
def test_string_parser_with_unsupported_format(self):
|
||||
parser = StringTypeParser()
|
||||
|
||||
@@ -197,3 +209,18 @@ class TestStringTypeParser(TestCase):
|
||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||
|
||||
self.assertEqual(type_parsing, datetime)
|
||||
|
||||
def test_string_parser_with_byte_format(self):
    """A string schema with ``format: binary`` parses to ``bytes``.

    The original format name must also be preserved in the validator's
    ``json_schema_extra`` metadata.
    """
    parser = StringTypeParser()

    binary_properties = {
        "type": "string",
        "format": "binary",
    }

    parsed_type, validator = parser.from_properties(
        "placeholder", binary_properties
    )

    self.assertEqual(parsed_type, bytes)

    # There is no dedicated pydantic type for binary strings, so the
    # format is carried through as schema metadata instead.
    self.assertIn("json_schema_extra", validator)
    self.assertEqual(validator["json_schema_extra"]["format"], "binary")
|
||||
|
||||
@@ -358,10 +358,13 @@ class TestSchemaConverter(TestCase):
|
||||
Model = SchemaConverter.build(schema)
|
||||
|
||||
obj = Model(
|
||||
name="J",
|
||||
name="John",
|
||||
)
|
||||
|
||||
self.assertEqual(obj.name, "J")
|
||||
self.assertEqual(obj.name, "John")
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(name="J")
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
Model(name="John Invalid")
|
||||
|
||||
`typing_extensions` is preferred for Python 3.10 compatibility