Compare commits
73 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a75ad61a21 | |||
|
|
2fd092cd8e | ||
|
|
e12396477f | ||
| 6440c30a91 | |||
|
19d1f72951
|
|||
|
02a28c9586
|
|||
|
eee32a02ae
|
|||
| 00802744dd | |||
|
dd2e7d221c
|
|||
|
558abf5d40
|
|||
| 70afa80ccf | |||
|
422cc2efe0
|
|||
|
|
dd31f62ef2 | ||
| e8bda6bc07 | |||
|
d8fe98639a
|
|||
| 666e12262f | |||
|
ab9646238e
|
|||
| dba492a6dc | |||
|
628abe161d
|
|||
|
136d68d273
|
|||
|
fcea994dd6
|
|||
| 39a9612106 | |||
|
27e756dadf
|
|||
|
40106e4765
|
|||
|
d418ad96ad
|
|||
| 79e65b994e | |||
|
beed4e5e97
|
|||
| b705a3a70b | |||
|
268ac85667
|
|||
|
20872d4a91
|
|||
| 34910b55d7 | |||
|
a3cbd5bc3d
|
|||
|
682f19654d
|
|||
|
4baaeed349
|
|||
|
9837a99ec9
|
|||
|
3a8ca951db
|
|||
|
57f8b571de
|
|||
|
5ec30cd565
|
|||
|
c2b9e8daf8
|
|||
|
328eb66034
|
|||
|
4de711075e
|
|||
|
abc8bc2e40
|
|||
|
10bad254d7
|
|||
| b5e2d703cb | |||
|
|
44fa0cf16a | ||
| d11e3191c3 | |||
| 2da409e6df | |||
| e775b53f7d | |||
|
f15913c58e
|
|||
|
f80a1bbda3
|
|||
| b31c990b54 | |||
|
a0d15726d4
|
|||
| 59f062ec37 | |||
|
5036059272
|
|||
|
90639b6426
|
|||
|
e43e92cb9e
|
|||
|
ffbd124cf9
|
|||
|
cfbe1f38c8
|
|||
|
9823e69329
|
|||
|
84292cf3c0
|
|||
|
8b1520741b
|
|||
|
c7e366cf08
|
|||
|
ebcc8a295e
|
|||
|
07f301db1c
|
|||
|
c9330dfd6d
|
|||
|
|
9bc16ff1aa | ||
|
|
43ce95cc9a | ||
| 81c149120e | |||
| 171dddabab | |||
|
f0192ee6d3
|
|||
|
|
82feea0ab1 | ||
| 4d5ac1c885 | |||
|
92c174c189
|
@@ -1,7 +1,7 @@
|
|||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
# Ruff version.
|
# Ruff version.
|
||||||
rev: v0.11.4
|
rev: v0.14.7
|
||||||
hooks:
|
hooks:
|
||||||
# Run the linter.
|
# Run the linter.
|
||||||
- id: ruff
|
- id: ruff
|
||||||
|
|||||||
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -2,7 +2,7 @@
|
|||||||
name: Bug report
|
name: Bug report
|
||||||
about: Create a report to help us improve
|
about: Create a report to help us improve
|
||||||
title: "[BUG] Title Here"
|
title: "[BUG] Title Here"
|
||||||
labels: enhancement
|
labels: bug
|
||||||
assignees: HideyoshiNakazone
|
assignees: HideyoshiNakazone
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
5
.github/workflows/build.yml
vendored
5
.github/workflows/build.yml
vendored
@@ -23,6 +23,7 @@ jobs:
|
|||||||
- "3.11"
|
- "3.11"
|
||||||
- "3.12"
|
- "3.12"
|
||||||
- "3.13"
|
- "3.13"
|
||||||
|
- "3.14"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
@@ -31,7 +32,7 @@ jobs:
|
|||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v5
|
||||||
with:
|
with:
|
||||||
# Install a specific version of uv.
|
# Install a specific version of uv.
|
||||||
version: "0.6.14"
|
version: "0.9.15"
|
||||||
enable-cache: true
|
enable-cache: true
|
||||||
cache-dependency-glob: "uv.lock"
|
cache-dependency-glob: "uv.lock"
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
@@ -68,7 +69,7 @@ jobs:
|
|||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v5
|
||||||
with:
|
with:
|
||||||
# Install a specific version of uv.
|
# Install a specific version of uv.
|
||||||
version: "0.6.14"
|
version: "0.9.15"
|
||||||
enable-cache: true
|
enable-cache: true
|
||||||
cache-dependency-glob: "uv.lock"
|
cache-dependency-glob: "uv.lock"
|
||||||
|
|
||||||
|
|||||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
|||||||
|
3.14
|
||||||
48
README.md
48
README.md
@@ -1,8 +1,8 @@
|
|||||||
# Jambo - JSON Schema to Pydantic Converter
|
# Jambo - JSON Schema to Pydantic Converter
|
||||||
|
|
||||||
<p align="center">
|
<p style="text-align:center">
|
||||||
<a href="https://github.com/HideyoshiNakazone/jambo" target="_blank">
|
<a href="https://github.com/HideyoshiNakazone/jambo" target="_blank">
|
||||||
<img src="https://img.shields.io/github/last-commit/HideyoshiNakazone/jambo.svg">
|
<img src="https://img.shields.io/github/last-commit/HideyoshiNakazone/jambo.svg" alt="Last commit">
|
||||||
<img src="https://github.com/HideyoshiNakazone/jambo/actions/workflows/build.yml/badge.svg" alt="Tests">
|
<img src="https://github.com/HideyoshiNakazone/jambo/actions/workflows/build.yml/badge.svg" alt="Tests">
|
||||||
</a>
|
</a>
|
||||||
<a href="https://codecov.io/gh/HideyoshiNakazone/jambo" target="_blank">
|
<a href="https://codecov.io/gh/HideyoshiNakazone/jambo" target="_blank">
|
||||||
@@ -19,12 +19,13 @@
|
|||||||
</p>
|
</p>
|
||||||
|
|
||||||
**Jambo** is a Python package that automatically converts [JSON Schema](https://json-schema.org/) definitions into [Pydantic](https://docs.pydantic.dev/) models.
|
**Jambo** is a Python package that automatically converts [JSON Schema](https://json-schema.org/) definitions into [Pydantic](https://docs.pydantic.dev/) models.
|
||||||
It's designed to streamline schema validation and enforce type safety using Pydantic's powerful validation features.
|
It's designed to streamline schema validation and enforce type safety using Pydantic's validation features.
|
||||||
|
|
||||||
Created to simplifying the process of dynamically generating Pydantic models for AI frameworks like [LangChain](https://www.langchain.com/), [CrewAI](https://www.crewai.com/), and others.
|
Created to simplify the process of dynamically generating Pydantic models for AI frameworks like [LangChain](https://www.langchain.com/), [CrewAI](https://www.crewai.com/), and others.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|
||||||
## ✨ Features
|
## ✨ Features
|
||||||
|
|
||||||
- ✅ Convert JSON Schema into Pydantic models dynamically;
|
- ✅ Convert JSON Schema into Pydantic models dynamically;
|
||||||
@@ -56,10 +57,19 @@ pip install jambo
|
|||||||
|
|
||||||
## 🚀 Usage
|
## 🚀 Usage
|
||||||
|
|
||||||
|
There are two ways to build models with Jambo:
|
||||||
|
|
||||||
|
1. The original static API: `SchemaConverter.build(schema)` doesn't persist any reference cache between calls and doesn't require any configuration.
|
||||||
|
2. The new instance API: use a `SchemaConverter()` instance and call `build_with_cache`, which exposes and persists a reference cache and helper methods.
|
||||||
|
|
||||||
|
The instance API is useful when you want to reuse generated subtypes, inspect cached models, or share caches between converters; all leveraging namespaces via the `$id` property in JSON Schema. See the docs for full details: https://jambo.readthedocs.io/en/latest/usage.ref_cache.html
|
||||||
|
|
||||||
|
|
||||||
|
### Static (compatibility) example
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from jambo import SchemaConverter
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
|
||||||
schema = {
|
schema = {
|
||||||
"title": "Person",
|
"title": "Person",
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@@ -70,12 +80,40 @@ schema = {
|
|||||||
"required": ["name"],
|
"required": ["name"],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Old-style convenience API (kept for compatibility)
|
||||||
Person = SchemaConverter.build(schema)
|
Person = SchemaConverter.build(schema)
|
||||||
|
|
||||||
obj = Person(name="Alice", age=30)
|
obj = Person(name="Alice", age=30)
|
||||||
print(obj)
|
print(obj)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Instance API (recommended for cache control)
|
||||||
|
|
||||||
|
```python
|
||||||
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
converter = SchemaConverter()
|
||||||
|
|
||||||
|
schema = {
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {"type": "object", "properties": {"street": {"type": "string"}}},
|
||||||
|
},
|
||||||
|
"required": ["name"],
|
||||||
|
}
|
||||||
|
|
||||||
|
# build_with_cache populates the converter's instance-level ref cache
|
||||||
|
Person = converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
# you can retrieve cached subtypes by name/path
|
||||||
|
cached_person = converter.get_cached_ref("Person")
|
||||||
|
# clear the instance cache when needed
|
||||||
|
converter.clear_ref_cache()
|
||||||
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## ✅ Example Validations
|
## ✅ Example Validations
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ extensions = [
|
|||||||
"sphinx.ext.viewcode",
|
"sphinx.ext.viewcode",
|
||||||
"sphinx.ext.autodoc",
|
"sphinx.ext.autodoc",
|
||||||
"sphinx.ext.napoleon",
|
"sphinx.ext.napoleon",
|
||||||
|
"sphinx_autodoc_typehints", # <-- needed
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@@ -35,3 +36,6 @@ html_static_path = ["_static"]
|
|||||||
# -- Options for autodoc -----------------------------------------------------
|
# -- Options for autodoc -----------------------------------------------------
|
||||||
add_module_names = False
|
add_module_names = False
|
||||||
python_use_unqualified_type_names = True
|
python_use_unqualified_type_names = True
|
||||||
|
|
||||||
|
|
||||||
|
autodoc_typehints = "both"
|
||||||
|
|||||||
348
docs/source/usage.ref_cache.rst
Normal file
348
docs/source/usage.ref_cache.rst
Normal file
@@ -0,0 +1,348 @@
|
|||||||
|
===============
|
||||||
|
Reference Cache
|
||||||
|
===============
|
||||||
|
|
||||||
|
The reference cache is named after the mechanism used to implement
|
||||||
|
the `$ref` keyword in the JSON Schema specification.
|
||||||
|
|
||||||
|
Internally, the cache is used by both :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>`
|
||||||
|
and :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>`.
|
||||||
|
However, only :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` exposes the cache through a supported API;
|
||||||
|
:py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>` uses the cache internally and does not provide access to it.
|
||||||
|
|
||||||
|
The reference cache accepts a mutable mapping (typically a plain Python dict)
|
||||||
|
that maps reference names (strings) to generated Pydantic model classes.
|
||||||
|
Since only the reference names are stored it can cause name collisions if
|
||||||
|
multiple schemas with overlapping names are processed using the same cache.
|
||||||
|
Therefore, it's recommended that each namespace or schema source uses its own
|
||||||
|
:class:`SchemaConverter` instance.
|
||||||
|
|
||||||
|
-----------------------------------------
|
||||||
|
Configuring and Using the Reference Cache
|
||||||
|
-----------------------------------------
|
||||||
|
|
||||||
|
The reference cache can be used in three ways:
|
||||||
|
|
||||||
|
* Without a persistent reference cache (no sharing between calls).
|
||||||
|
* Passing an explicit ``ref_cache`` dictionary to a call.
|
||||||
|
* Using the converter instance's default cache (the instance-level cache).
|
||||||
|
|
||||||
|
|
||||||
|
Usage Without Reference Cache
|
||||||
|
=============================
|
||||||
|
|
||||||
|
When you run the library without a persistent reference cache, the generated
|
||||||
|
types are not stored for reuse. Each call to a build method creates fresh
|
||||||
|
Pydantic model classes (they will have different Python object identities).
|
||||||
|
Because nothing is cached, you cannot look up generated subtypes later.
|
||||||
|
|
||||||
|
This is the default behaviour of :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>`.
|
||||||
|
You can achieve the same behaviour with :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` by
|
||||||
|
passing ``without_cache=True``.
|
||||||
|
|
||||||
|
|
||||||
|
Usage: Manually Passing a Reference Cache
|
||||||
|
=========================================
|
||||||
|
|
||||||
|
You can create and pass your own mutable mapping (typically a plain dict)
|
||||||
|
as the reference cache. This gives you full control over sharing and
|
||||||
|
lifetime of cached types. When two converters share the same dict, types
|
||||||
|
created by one converter will be reused by the other.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
# a shared cache you control
|
||||||
|
shared_cache = {}
|
||||||
|
|
||||||
|
converter1 = SchemaConverter(shared_cache)
|
||||||
|
converter2 = SchemaConverter(shared_cache)
|
||||||
|
|
||||||
|
model1 = converter1.build_with_cache(schema)
|
||||||
|
model2 = converter2.build_with_cache(schema)
|
||||||
|
|
||||||
|
# Because both converters use the same cache object, the built models are the same object
|
||||||
|
assert model1 is model2
|
||||||
|
|
||||||
|
If you prefer a per-call cache (leaving the converter's instance cache unchanged), pass the ``ref_cache`` parameter to
|
||||||
|
:py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>`:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
# pass an explicit, private cache for this call only
|
||||||
|
model_a = converter1.build_with_cache(schema, ref_cache={})
|
||||||
|
model_b = converter1.build_with_cache(schema, ref_cache={})
|
||||||
|
|
||||||
|
# because each call received a fresh dict, the resulting model classes are distinct
|
||||||
|
assert model_a is not model_b
|
||||||
|
|
||||||
|
|
||||||
|
Usage: Using the Instance Default (Instance-level) Cache
|
||||||
|
=======================================================
|
||||||
|
|
||||||
|
By default, a :class:`SchemaConverter` instance creates and keeps an internal
|
||||||
|
reference cache (a plain dict). Reusing the same converter instance across
|
||||||
|
multiple calls will reuse that cache and therefore reuse previously generated
|
||||||
|
model classes.
|
||||||
|
|
||||||
|
That cache is isolated per namespace via the `$id` property in JSON Schema, so
|
||||||
|
schemas with different `$id` values will not collide in the same cache.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
# no $id in this example, therefore a default namespace is used
|
||||||
|
schema = {
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"street": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["street", "city"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "address"],
|
||||||
|
}
|
||||||
|
|
||||||
|
converter = SchemaConverter() # has its own internal cache
|
||||||
|
|
||||||
|
model1 = converter.build_with_cache(schema)
|
||||||
|
model2 = converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
# model1 and model2 are the same object because the instance cache persisted
|
||||||
|
assert model1 is model2
|
||||||
|
|
||||||
|
When passing a schema with a different `$id`, the instance cache keeps types
|
||||||
|
separate:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
schema_a = {
|
||||||
|
"$id": "namespace_a",
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["name"],
|
||||||
|
}
|
||||||
|
|
||||||
|
schema_b = {
|
||||||
|
"$id": "namespace_b",
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["name"],
|
||||||
|
}
|
||||||
|
|
||||||
|
converter = SchemaConverter() # has its own internal cache
|
||||||
|
|
||||||
|
model_a = converter.build_with_cache(schema_a)
|
||||||
|
model_b = converter.build_with_cache(schema_b)
|
||||||
|
|
||||||
|
# different $id values isolate the types in the same cache
|
||||||
|
assert model_a is not model_b
|
||||||
|
|
||||||
|
If you want to temporarily avoid using the instance cache for a single call,
|
||||||
|
use ``without_cache=True``. That causes :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` to
|
||||||
|
use a fresh, empty cache for the duration of that call only:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
model1 = converter.build_with_cache(schema, without_cache=True)
|
||||||
|
model2 = converter.build_with_cache(schema, without_cache=True)
|
||||||
|
|
||||||
|
# each call used a fresh cache, so the models are distinct
|
||||||
|
assert model1 is not model2
|
||||||
|
|
||||||
|
|
||||||
|
Inspecting and Managing the Cache
|
||||||
|
=================================
|
||||||
|
|
||||||
|
The converter provides a small, explicit API to inspect and manage the
|
||||||
|
instance cache.
|
||||||
|
|
||||||
|
Retrieving cached types
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
:py:meth:`SchemaConverter.get_cached_ref <jambo.SchemaConverter.get_cached_ref>`(name, namespace="default") — returns a cached model class or ``None``.
|
||||||
|
|
||||||
|
Retrieving the root type of the schema
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
When retrieving the root type of a schema, pass the schema's ``title`` property as the name.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
converter = SchemaConverter()
|
||||||
|
|
||||||
|
schema = {
|
||||||
|
"title": "person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
person_model = converter.build_with_cache(schema)
|
||||||
|
cached_person_model = converter.get_cached_ref("person")
|
||||||
|
|
||||||
|
|
||||||
|
Retrieving a subtype
|
||||||
|
~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
When retrieving a subtype, pass a path string (for example, ``parent_name.field_name``) as the name.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
converter = SchemaConverter()
|
||||||
|
|
||||||
|
schema = {
|
||||||
|
"title": "person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"street": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["street", "city"],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
person_model = converter.build_with_cache(schema)
|
||||||
|
cached_address_model = converter.get_cached_ref("person.address")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Retrieving a type from ``$defs``
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
When retrieving a type defined in ``$defs``, access it directly by its name.
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
converter = SchemaConverter()
|
||||||
|
|
||||||
|
schema = {
|
||||||
|
"title": "person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {"$ref": "#/$defs/address"},
|
||||||
|
},
|
||||||
|
"$defs": {
|
||||||
|
"address": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"street": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["street", "city"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
person_model = converter.build_with_cache(schema)
|
||||||
|
cached_address_model = converter.get_cached_ref("address")
|
||||||
|
|
||||||
|
|
||||||
|
Isolation by Namespace
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The instance cache is isolated per namespace via the `$id` property in JSON Schema.
|
||||||
|
When retrieving a cached type, you can specify the namespace to look in
|
||||||
|
(via the ``namespace`` parameter). By default, the ``default`` namespace is used
|
||||||
|
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
converter = SchemaConverter()
|
||||||
|
|
||||||
|
schema_a = {
|
||||||
|
"$id": "namespace_a",
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["name"],
|
||||||
|
}
|
||||||
|
|
||||||
|
schema_b = {
|
||||||
|
"$id": "namespace_b",
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["name"],
|
||||||
|
}
|
||||||
|
|
||||||
|
person_a = converter.build_with_cache(schema_a)
|
||||||
|
person_b = converter.build_with_cache(schema_b)
|
||||||
|
|
||||||
|
cached_person_a = converter.get_cached_ref("Person", namespace="namespace_a")
|
||||||
|
cached_person_b = converter.get_cached_ref("Person", namespace="namespace_b")
|
||||||
|
|
||||||
|
assert cached_person_a is person_a
|
||||||
|
assert cached_person_b is person_b
|
||||||
|
|
||||||
|
|
||||||
|
Clearing the cache
|
||||||
|
------------------
|
||||||
|
|
||||||
|
:py:meth:`SchemaConverter.clear_ref_cache <jambo.SchemaConverter.clear_ref_cache>`(namespace: Optional[str]="default") — removes all entries from the instance cache.
|
||||||
|
|
||||||
|
|
||||||
|
When you want to clear the instance cache, use :py:meth:`SchemaConverter.clear_ref_cache <jambo.SchemaConverter.clear_ref_cache>`.
|
||||||
|
You can optionally specify a ``namespace`` to clear only that namespace;
|
||||||
|
otherwise, the default namespace is cleared.
|
||||||
|
|
||||||
|
If you want to clear all namespaces, call :py:meth:`SchemaConverter.clear_ref_cache <jambo.SchemaConverter.clear_ref_cache>` passing `None` as the namespace,
|
||||||
|
which removes all entries from all namespaces.
|
||||||
|
|
||||||
|
|
||||||
|
Notes and Behavioural Differences
|
||||||
|
================================
|
||||||
|
|
||||||
|
* :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>` does not expose or persist an instance cache. If you call it without
|
||||||
|
providing a ``ref_cache`` it will create and use a temporary cache for that
|
||||||
|
call only; nothing from that call will be available later via
|
||||||
|
:py:meth:`SchemaConverter.get_cached_ref <jambo.SchemaConverter.get_cached_ref>`.
|
||||||
|
|
||||||
|
* :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` is the supported entry point when you want
|
||||||
|
cache control: it uses the instance cache by default, accepts an explicit
|
||||||
|
``ref_cache`` dict for per-call control, or uses ``without_cache=True`` to
|
||||||
|
run with an ephemeral cache.
|
||||||
|
|
||||||
|
|
||||||
|
References in the Test Suite
|
||||||
|
============================
|
||||||
|
|
||||||
|
These behaviours are exercised in the project's tests; see :mod:`tests.test_schema_converter`
|
||||||
|
for examples and additional usage notes.
|
||||||
@@ -1,9 +1,15 @@
|
|||||||
|
===================
|
||||||
Using Jambo
|
Using Jambo
|
||||||
===================
|
===================
|
||||||
|
|
||||||
Jambo is designed to be easy to use, it doesn't require any complex setup or configuration.
|
Jambo is designed to be easy to use. It doesn't require complex setup or configuration when not needed, while providing more powerful instance methods when you do need control.
|
||||||
Below a example of how to use Jambo to convert a JSON Schema into a Pydantic model.
|
|
||||||
|
|
||||||
|
Below is an example of how to use Jambo to convert a JSON Schema into a Pydantic model.
|
||||||
|
|
||||||
|
|
||||||
|
-------------------------
|
||||||
|
Static Method (no config)
|
||||||
|
-------------------------
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
@@ -15,8 +21,16 @@ Below a example of how to use Jambo to convert a JSON Schema into a Pydantic mod
|
|||||||
"properties": {
|
"properties": {
|
||||||
"name": {"type": "string"},
|
"name": {"type": "string"},
|
||||||
"age": {"type": "integer"},
|
"age": {"type": "integer"},
|
||||||
|
"address": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"street": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["street", "city"],
|
||||||
|
},
|
||||||
},
|
},
|
||||||
"required": ["name"],
|
"required": ["name", "address"],
|
||||||
}
|
}
|
||||||
|
|
||||||
Person = SchemaConverter.build(schema)
|
Person = SchemaConverter.build(schema)
|
||||||
@@ -26,16 +40,81 @@ Below a example of how to use Jambo to convert a JSON Schema into a Pydantic mod
|
|||||||
# Output: Person(name='Alice', age=30)
|
# Output: Person(name='Alice', age=30)
|
||||||
|
|
||||||
|
|
||||||
The :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>` static method takes a JSON Schema dictionary and returns a Pydantic model class. You can then instantiate this class with the required fields, and it will automatically validate the data according to the schema.
|
The :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>` static method takes a JSON Schema dictionary and returns a Pydantic model class.
|
||||||
|
|
||||||
If passed a description inside the schema it will also add it to the Pydantic model using the `description` field. This is useful for AI Frameworks as: LangChain, CrewAI and others, as they use this description for passing context to LLMs.
|
Note: the static ``build`` method was the original public API of this library. It creates and returns a model class for the provided schema but does not expose or persist an instance cache.
|
||||||
|
|
||||||
|
|
||||||
For more complex schemas and types see our documentation on
|
--------------------------------
|
||||||
|
Instance Method (with ref cache)
|
||||||
|
--------------------------------
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
from jambo import SchemaConverter
|
||||||
|
|
||||||
|
converter = SchemaConverter()
|
||||||
|
|
||||||
|
schema = {
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"street": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["street", "city"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "address"],
|
||||||
|
}
|
||||||
|
|
||||||
|
# The instance API (build_with_cache) populates the converter's instance-level reference cache
|
||||||
|
Person = converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
obj = Person(name="Alice", age=30)
|
||||||
|
print(obj)
|
||||||
|
# Output: Person(name='Alice', age=30)
|
||||||
|
|
||||||
|
# When using the converter's built-in instance cache (no ref_cache passed to the call),
|
||||||
|
# all object types parsed during the build are stored and can be retrieved via get_cached_ref.
|
||||||
|
|
||||||
|
cached_person_model = converter.get_cached_ref("Person")
|
||||||
|
assert Person is cached_person_model # the cached class is the same object that was built
|
||||||
|
|
||||||
|
# A nested/subobject type can also be retrieved from the instance cache
|
||||||
|
cached_address_model = converter.get_cached_ref("Person.address")
|
||||||
|
|
||||||
|
|
||||||
|
The :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` instance method was added after the
|
||||||
|
initial static API to make it easier to access and reuse subtypes defined in a schema.
|
||||||
|
Unlike the original static :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>`,
|
||||||
|
the instance method persists and exposes the reference cache and provides helpers such as
|
||||||
|
:py:meth:`SchemaConverter.get_cached_ref <jambo.SchemaConverter.get_cached_ref>` and
|
||||||
|
:py:meth:`SchemaConverter.clear_ref_cache <jambo.SchemaConverter.clear_ref_cache>`.
|
||||||
|
|
||||||
|
.. warning::
|
||||||
|
The instance API with reference cache can lead to schema and type name collisions if not managed carefully.
|
||||||
|
It's recommended that each schema defines its own unique namespace using the `$id` property in JSON Schema,
|
||||||
|
and then access it's ref_cache by passing it explicitly when needed.
|
||||||
|
|
||||||
|
For details and examples about the reference cache and the different cache modes (instance cache, per-call cache, ephemeral cache), see:
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
usage.ref_cache
|
||||||
|
|
||||||
|
|
||||||
|
Type System
|
||||||
|
-----------
|
||||||
|
|
||||||
|
For a full explanation of the supported schemas and types see our documentation on types:
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
:caption: Contents:
|
|
||||||
|
|
||||||
usage.string
|
usage.string
|
||||||
usage.numeric
|
usage.numeric
|
||||||
|
|||||||
@@ -18,6 +18,9 @@ class GenericTypeParser(ABC, Generic[T]):
|
|||||||
default_mappings = {
|
default_mappings = {
|
||||||
"default": "default",
|
"default": "default",
|
||||||
"description": "description",
|
"description": "description",
|
||||||
|
"examples": "examples",
|
||||||
|
"title": "title",
|
||||||
|
"deprecated": "deprecated",
|
||||||
}
|
}
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
@@ -51,6 +54,11 @@ class GenericTypeParser(ABC, Generic[T]):
|
|||||||
"Default value is not valid", invalid_field=name
|
"Default value is not valid", invalid_field=name
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if not self._validate_examples(parsed_type, parsed_properties):
|
||||||
|
raise InvalidSchemaException(
|
||||||
|
"Examples values are not valid", invalid_field=name
|
||||||
|
)
|
||||||
|
|
||||||
return parsed_type, parsed_properties
|
return parsed_type, parsed_properties
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -65,10 +73,39 @@ class GenericTypeParser(ABC, Generic[T]):
|
|||||||
:param kwargs: Additional options for type parsing.
|
:param kwargs: Additional options for type parsing.
|
||||||
:return: A tuple containing the type and its properties.
|
:return: A tuple containing the type and its properties.
|
||||||
"""
|
"""
|
||||||
parser = cls._get_impl(properties)
|
|
||||||
|
parser = cls._get_impl(cls._normalize_properties(properties))
|
||||||
|
|
||||||
return parser().from_properties(name=name, properties=properties, **kwargs)
|
return parser().from_properties(name=name, properties=properties, **kwargs)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _normalize_properties(properties: JSONSchema) -> JSONSchema:
|
||||||
|
"""
|
||||||
|
Normalizes the properties dictionary to ensure consistent structure.
|
||||||
|
:param properties: The properties to be normalized.
|
||||||
|
"""
|
||||||
|
type_value = properties.pop("type", None)
|
||||||
|
|
||||||
|
if isinstance(type_value, str):
|
||||||
|
properties["type"] = type_value
|
||||||
|
return properties
|
||||||
|
|
||||||
|
if isinstance(type_value, list) and len(type_value) == 0:
|
||||||
|
raise InvalidSchemaException(
|
||||||
|
"Invalid schema: 'type' list cannot be empty",
|
||||||
|
invalid_field=str(properties),
|
||||||
|
)
|
||||||
|
|
||||||
|
if isinstance(type_value, list) and len(type_value) == 1:
|
||||||
|
properties["type"] = type_value[0]
|
||||||
|
return properties
|
||||||
|
|
||||||
|
if isinstance(type_value, list):
|
||||||
|
properties["anyOf"] = [{"type": t} for t in type_value]
|
||||||
|
return properties
|
||||||
|
|
||||||
|
return properties
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _get_impl(cls, properties: JSONSchema) -> type[Self]:
|
def _get_impl(cls, properties: JSONSchema) -> type[Self]:
|
||||||
for subcls in cls.__subclasses__():
|
for subcls in cls.__subclasses__():
|
||||||
@@ -120,6 +157,25 @@ class GenericTypeParser(ABC, Generic[T]):
|
|||||||
if value is None:
|
if value is None:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
return GenericTypeParser._is_valid_value(field_type, field_prop, value)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _validate_examples(field_type: T, field_prop: dict) -> bool:
|
||||||
|
examples = field_prop.get("examples")
|
||||||
|
|
||||||
|
if examples is None:
|
||||||
|
return True
|
||||||
|
|
||||||
|
if not isinstance(examples, list):
|
||||||
|
return False
|
||||||
|
|
||||||
|
return all(
|
||||||
|
GenericTypeParser._is_valid_value(field_type, field_prop, e)
|
||||||
|
for e in examples
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _is_valid_value(field_type: T, field_prop: dict, value: Any) -> bool:
|
||||||
try:
|
try:
|
||||||
field = Annotated[field_type, Field(**field_prop)] # type: ignore
|
field = Annotated[field_type, Field(**field_prop)] # type: ignore
|
||||||
TypeAdapter(field).validate_python(value)
|
TypeAdapter(field).validate_python(value)
|
||||||
|
|||||||
@@ -27,6 +27,9 @@ class AllOfTypeParser(GenericTypeParser):
|
|||||||
sub_properties
|
sub_properties
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if (examples := properties.get("examples")) is not None:
|
||||||
|
combined_properties["examples"] = examples
|
||||||
|
|
||||||
return parser().from_properties_impl(name, combined_properties, **kwargs)
|
return parser().from_properties_impl(name, combined_properties, **kwargs)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
|||||||
@@ -30,8 +30,10 @@ class AnyOfTypeParser(GenericTypeParser):
|
|||||||
sub_properties = properties["anyOf"]
|
sub_properties = properties["anyOf"]
|
||||||
|
|
||||||
sub_types = [
|
sub_types = [
|
||||||
GenericTypeParser.type_from_properties(name, subProperty, **kwargs)
|
GenericTypeParser.type_from_properties(
|
||||||
for subProperty in sub_properties
|
f"{name}.sub{i}", subProperty, **kwargs
|
||||||
|
)
|
||||||
|
for i, subProperty in enumerate(sub_properties)
|
||||||
]
|
]
|
||||||
|
|
||||||
if not kwargs.get("required", False):
|
if not kwargs.get("required", False):
|
||||||
@@ -40,8 +42,12 @@ class AnyOfTypeParser(GenericTypeParser):
|
|||||||
# By defining the type as Union of Annotated type we can use the Field validator
|
# By defining the type as Union of Annotated type we can use the Field validator
|
||||||
# to enforce the constraints of each union type when needed.
|
# to enforce the constraints of each union type when needed.
|
||||||
# We use Annotated to attach the Field validators to the type.
|
# We use Annotated to attach the Field validators to the type.
|
||||||
field_types = [
|
field_types = []
|
||||||
Annotated[t, Field(**v)] if v is not None else t for t, v in sub_types
|
for subType, subProp in sub_types:
|
||||||
]
|
default_value = subProp.pop("default", None)
|
||||||
|
if default_value is None:
|
||||||
|
default_value = ...
|
||||||
|
|
||||||
|
field_types.append(Annotated[subType, Field(default_value, **subProp)])
|
||||||
|
|
||||||
return Union[(*field_types,)], mapped_properties
|
return Union[(*field_types,)], mapped_properties
|
||||||
|
|||||||
@@ -2,21 +2,19 @@ from jambo.exceptions import InvalidSchemaException
|
|||||||
from jambo.parser._type_parser import GenericTypeParser
|
from jambo.parser._type_parser import GenericTypeParser
|
||||||
from jambo.types.type_parser_options import TypeParserOptions
|
from jambo.types.type_parser_options import TypeParserOptions
|
||||||
|
|
||||||
from typing_extensions import Iterable, TypeVar, Unpack
|
from typing_extensions import (
|
||||||
|
Iterable,
|
||||||
|
Unpack,
|
||||||
|
)
|
||||||
|
|
||||||
import copy
|
import copy
|
||||||
|
|
||||||
|
|
||||||
V = TypeVar("V")
|
|
||||||
|
|
||||||
|
|
||||||
class ArrayTypeParser(GenericTypeParser):
|
class ArrayTypeParser(GenericTypeParser):
|
||||||
mapped_type = list
|
mapped_type = list
|
||||||
|
|
||||||
json_schema_type = "type:array"
|
json_schema_type = "type:array"
|
||||||
|
|
||||||
default_mappings = {"description": "description"}
|
|
||||||
|
|
||||||
type_mappings = {
|
type_mappings = {
|
||||||
"maxItems": "max_length",
|
"maxItems": "max_length",
|
||||||
"minItems": "min_length",
|
"minItems": "min_length",
|
||||||
@@ -43,11 +41,18 @@ class ArrayTypeParser(GenericTypeParser):
|
|||||||
|
|
||||||
mapped_properties = self.mappings_properties_builder(properties, **kwargs)
|
mapped_properties = self.mappings_properties_builder(properties, **kwargs)
|
||||||
|
|
||||||
if "default" in properties or not kwargs.get("required", False):
|
if (
|
||||||
|
default_value := mapped_properties.pop("default", None)
|
||||||
|
) is not None or not kwargs.get("required", False):
|
||||||
mapped_properties["default_factory"] = self._build_default_factory(
|
mapped_properties["default_factory"] = self._build_default_factory(
|
||||||
properties.get("default"), wrapper_type
|
default_value, wrapper_type
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if (example_values := mapped_properties.pop("examples", None)) is not None:
|
||||||
|
mapped_properties["examples"] = [
|
||||||
|
wrapper_type(example) for example in example_values
|
||||||
|
]
|
||||||
|
|
||||||
return field_type, mapped_properties
|
return field_type, mapped_properties
|
||||||
|
|
||||||
def _build_default_factory(self, default_list, wrapper_type):
|
def _build_default_factory(self, default_list, wrapper_type):
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ class ConstTypeParser(GenericTypeParser):
|
|||||||
default_mappings = {
|
default_mappings = {
|
||||||
"const": "default",
|
"const": "default",
|
||||||
"description": "description",
|
"description": "description",
|
||||||
|
"examples": "examples",
|
||||||
}
|
}
|
||||||
|
|
||||||
def from_properties_impl(
|
def from_properties_impl(
|
||||||
|
|||||||
@@ -36,9 +36,16 @@ class EnumTypeParser(GenericTypeParser):
|
|||||||
|
|
||||||
# Create a new Enum type dynamically
|
# Create a new Enum type dynamically
|
||||||
enum_type = Enum(name, {str(value).upper(): value for value in enum_values}) # type: ignore
|
enum_type = Enum(name, {str(value).upper(): value for value in enum_values}) # type: ignore
|
||||||
|
enum_type.__doc__ = properties.get("description")
|
||||||
|
|
||||||
parsed_properties = self.mappings_properties_builder(properties, **kwargs)
|
parsed_properties = self.mappings_properties_builder(properties, **kwargs)
|
||||||
|
|
||||||
if "default" in parsed_properties and parsed_properties["default"] is not None:
|
if "default" in parsed_properties and parsed_properties["default"] is not None:
|
||||||
parsed_properties["default"] = enum_type(parsed_properties["default"])
|
parsed_properties["default"] = enum_type(parsed_properties["default"])
|
||||||
|
|
||||||
|
if "examples" in parsed_properties:
|
||||||
|
parsed_properties["examples"] = [
|
||||||
|
enum_type(example) for example in parsed_properties["examples"]
|
||||||
|
]
|
||||||
|
|
||||||
return enum_type, parsed_properties
|
return enum_type, parsed_properties
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
from jambo.exceptions import InternalAssertionException
|
||||||
from jambo.parser._type_parser import GenericTypeParser
|
from jambo.parser._type_parser import GenericTypeParser
|
||||||
from jambo.types.json_schema_type import JSONSchema
|
from jambo.types.json_schema_type import JSONSchema
|
||||||
from jambo.types.type_parser_options import TypeParserOptions
|
from jambo.types.type_parser_options import TypeParserOptions
|
||||||
@@ -6,6 +7,8 @@ from pydantic import BaseModel, ConfigDict, Field, create_model
|
|||||||
from pydantic.fields import FieldInfo
|
from pydantic.fields import FieldInfo
|
||||||
from typing_extensions import Unpack
|
from typing_extensions import Unpack
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
|
||||||
class ObjectTypeParser(GenericTypeParser):
|
class ObjectTypeParser(GenericTypeParser):
|
||||||
mapped_type = object
|
mapped_type = object
|
||||||
@@ -19,15 +22,25 @@ class ObjectTypeParser(GenericTypeParser):
|
|||||||
name,
|
name,
|
||||||
properties.get("properties", {}),
|
properties.get("properties", {}),
|
||||||
properties.get("required", []),
|
properties.get("required", []),
|
||||||
|
description=properties.get("description"),
|
||||||
**kwargs,
|
**kwargs,
|
||||||
)
|
)
|
||||||
type_properties = {}
|
type_properties = self.mappings_properties_builder(properties, **kwargs)
|
||||||
|
|
||||||
if "default" in properties:
|
if (
|
||||||
type_properties["default_factory"] = lambda: type_parsing.model_validate(
|
default_value := type_properties.pop("default", None)
|
||||||
properties["default"]
|
) is not None or not kwargs.get("required", False):
|
||||||
|
type_properties["default_factory"] = (
|
||||||
|
lambda: type_parsing.model_validate(default_value)
|
||||||
|
if default_value is not None
|
||||||
|
else None
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if (example_values := type_properties.pop("examples", None)) is not None:
|
||||||
|
type_properties["examples"] = [
|
||||||
|
type_parsing.model_validate(example) for example in example_values
|
||||||
|
]
|
||||||
|
|
||||||
return type_parsing, type_properties
|
return type_parsing, type_properties
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -36,6 +49,7 @@ class ObjectTypeParser(GenericTypeParser):
|
|||||||
name: str,
|
name: str,
|
||||||
properties: dict[str, JSONSchema],
|
properties: dict[str, JSONSchema],
|
||||||
required_keys: list[str],
|
required_keys: list[str],
|
||||||
|
description: str | None = None,
|
||||||
**kwargs: Unpack[TypeParserOptions],
|
**kwargs: Unpack[TypeParserOptions],
|
||||||
) -> type[BaseModel]:
|
) -> type[BaseModel]:
|
||||||
"""
|
"""
|
||||||
@@ -45,14 +59,34 @@ class ObjectTypeParser(GenericTypeParser):
|
|||||||
:param required_keys: List of required keys in the schema.
|
:param required_keys: List of required keys in the schema.
|
||||||
:return: A Pydantic model class.
|
:return: A Pydantic model class.
|
||||||
"""
|
"""
|
||||||
model_config = ConfigDict(validate_assignment=True)
|
ref_cache = kwargs.get("ref_cache")
|
||||||
fields = cls._parse_properties(properties, required_keys, **kwargs)
|
if ref_cache is None:
|
||||||
|
raise InternalAssertionException(
|
||||||
|
"`ref_cache` must be provided in kwargs for ObjectTypeParser"
|
||||||
|
)
|
||||||
|
|
||||||
return create_model(name, __config__=model_config, **fields) # type: ignore
|
if (model := ref_cache.get(name)) is not None and isinstance(model, type):
|
||||||
|
warnings.warn(
|
||||||
|
f"Type '{name}' is already in the ref_cache and therefore cached value will be used."
|
||||||
|
" This may indicate a namming collision in the schema or just a normal optimization,"
|
||||||
|
" if this behavior is desired pass a clean ref_cache or use the param `without_cache`"
|
||||||
|
)
|
||||||
|
return model
|
||||||
|
|
||||||
|
model_config = ConfigDict(validate_assignment=True)
|
||||||
|
fields = cls._parse_properties(name, properties, required_keys, **kwargs)
|
||||||
|
|
||||||
|
model = create_model(
|
||||||
|
name, __config__=model_config, __doc__=description, **fields
|
||||||
|
) # type: ignore
|
||||||
|
ref_cache[name] = model
|
||||||
|
|
||||||
|
return model
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _parse_properties(
|
def _parse_properties(
|
||||||
cls,
|
cls,
|
||||||
|
name: str,
|
||||||
properties: dict[str, JSONSchema],
|
properties: dict[str, JSONSchema],
|
||||||
required_keys: list[str],
|
required_keys: list[str],
|
||||||
**kwargs: Unpack[TypeParserOptions],
|
**kwargs: Unpack[TypeParserOptions],
|
||||||
@@ -60,15 +94,15 @@ class ObjectTypeParser(GenericTypeParser):
|
|||||||
required_keys = required_keys or []
|
required_keys = required_keys or []
|
||||||
|
|
||||||
fields = {}
|
fields = {}
|
||||||
for name, prop in properties.items():
|
for field_name, field_prop in properties.items():
|
||||||
sub_property: TypeParserOptions = kwargs.copy()
|
sub_property: TypeParserOptions = kwargs.copy()
|
||||||
sub_property["required"] = name in required_keys
|
sub_property["required"] = field_name in required_keys
|
||||||
|
|
||||||
parsed_type, parsed_properties = GenericTypeParser.type_from_properties(
|
parsed_type, parsed_properties = GenericTypeParser.type_from_properties(
|
||||||
name,
|
f"{name}.{field_name}",
|
||||||
prop,
|
field_prop,
|
||||||
**sub_property, # type: ignore
|
**sub_property, # type: ignore
|
||||||
)
|
)
|
||||||
fields[name] = (parsed_type, Field(**parsed_properties))
|
fields[field_name] = (parsed_type, Field(**parsed_properties))
|
||||||
|
|
||||||
return fields
|
return fields
|
||||||
|
|||||||
@@ -29,11 +29,11 @@ class OneOfTypeParser(GenericTypeParser):
|
|||||||
|
|
||||||
mapped_properties = self.mappings_properties_builder(properties, **kwargs)
|
mapped_properties = self.mappings_properties_builder(properties, **kwargs)
|
||||||
|
|
||||||
sub_properties = properties["oneOf"]
|
|
||||||
|
|
||||||
sub_types = [
|
sub_types = [
|
||||||
GenericTypeParser.type_from_properties(name, subProperty, **kwargs)
|
GenericTypeParser.type_from_properties(
|
||||||
for subProperty in sub_properties
|
f"{name}_sub{i}", subProperty, **kwargs
|
||||||
|
)
|
||||||
|
for i, subProperty in enumerate(properties["oneOf"])
|
||||||
]
|
]
|
||||||
|
|
||||||
if not kwargs.get("required", False):
|
if not kwargs.get("required", False):
|
||||||
@@ -45,8 +45,7 @@ class OneOfTypeParser(GenericTypeParser):
|
|||||||
# they were added by OpenAPI and not all implementations may support them,
|
# they were added by OpenAPI and not all implementations may support them,
|
||||||
# and they do not always generate a model one-to-one to the Pydantic model
|
# and they do not always generate a model one-to-one to the Pydantic model
|
||||||
# TL;DR: Discriminators were added by OpenAPI and not a Official JSON Schema feature
|
# TL;DR: Discriminators were added by OpenAPI and not a Official JSON Schema feature
|
||||||
discriminator = properties.get("discriminator")
|
if (discriminator := properties.get("discriminator")) is not None:
|
||||||
if discriminator is not None:
|
|
||||||
validated_type = self._build_type_one_of_with_discriminator(
|
validated_type = self._build_type_one_of_with_discriminator(
|
||||||
subfield_types, discriminator
|
subfield_types, discriminator
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
from jambo.exceptions import InternalAssertionException, InvalidSchemaException
|
from jambo.exceptions import InternalAssertionException, InvalidSchemaException
|
||||||
from jambo.parser import GenericTypeParser
|
from jambo.parser import GenericTypeParser
|
||||||
|
from jambo.types import RefCacheDict
|
||||||
from jambo.types.json_schema_type import JSONSchema
|
from jambo.types.json_schema_type import JSONSchema
|
||||||
from jambo.types.type_parser_options import TypeParserOptions
|
from jambo.types.type_parser_options import TypeParserOptions
|
||||||
|
|
||||||
@@ -72,7 +73,7 @@ class RefTypeParser(GenericTypeParser):
|
|||||||
return mapped_type
|
return mapped_type
|
||||||
|
|
||||||
def _get_ref_from_cache(
|
def _get_ref_from_cache(
|
||||||
self, ref_name: str, ref_cache: dict[str, ForwardRef | type | None]
|
self, ref_name: str, ref_cache: RefCacheDict
|
||||||
) -> RefType | type | None:
|
) -> RefType | type | None:
|
||||||
try:
|
try:
|
||||||
ref_state = ref_cache[ref_name]
|
ref_state = ref_cache[ref_name]
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ from jambo.exceptions import InvalidSchemaException
|
|||||||
from jambo.parser._type_parser import GenericTypeParser
|
from jambo.parser._type_parser import GenericTypeParser
|
||||||
from jambo.types.type_parser_options import TypeParserOptions
|
from jambo.types.type_parser_options import TypeParserOptions
|
||||||
|
|
||||||
from pydantic import AnyUrl, EmailStr
|
from pydantic import AnyUrl, EmailStr, TypeAdapter, ValidationError
|
||||||
from typing_extensions import Unpack
|
from typing_extensions import Unpack
|
||||||
|
|
||||||
from datetime import date, datetime, time, timedelta
|
from datetime import date, datetime, time, timedelta
|
||||||
@@ -19,7 +19,6 @@ class StringTypeParser(GenericTypeParser):
|
|||||||
"maxLength": "max_length",
|
"maxLength": "max_length",
|
||||||
"minLength": "min_length",
|
"minLength": "min_length",
|
||||||
"pattern": "pattern",
|
"pattern": "pattern",
|
||||||
"format": "format",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
format_type_mapping = {
|
format_type_mapping = {
|
||||||
@@ -63,4 +62,19 @@ class StringTypeParser(GenericTypeParser):
|
|||||||
if format_type in self.format_pattern_mapping:
|
if format_type in self.format_pattern_mapping:
|
||||||
mapped_properties["pattern"] = self.format_pattern_mapping[format_type]
|
mapped_properties["pattern"] = self.format_pattern_mapping[format_type]
|
||||||
|
|
||||||
|
try:
|
||||||
|
if "examples" in mapped_properties:
|
||||||
|
mapped_properties["examples"] = [
|
||||||
|
TypeAdapter(mapped_type).validate_python(example)
|
||||||
|
for example in mapped_properties["examples"]
|
||||||
|
]
|
||||||
|
except ValidationError as err:
|
||||||
|
raise InvalidSchemaException(
|
||||||
|
f"Invalid example type for field {name}."
|
||||||
|
) from err
|
||||||
|
|
||||||
|
if "json_schema_extra" not in mapped_properties:
|
||||||
|
mapped_properties["json_schema_extra"] = {}
|
||||||
|
mapped_properties["json_schema_extra"]["format"] = format_type
|
||||||
|
|
||||||
return mapped_type, mapped_properties
|
return mapped_type, mapped_properties
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
from jambo.exceptions import InvalidSchemaException, UnsupportedSchemaException
|
from jambo.exceptions import InvalidSchemaException, UnsupportedSchemaException
|
||||||
from jambo.parser import ObjectTypeParser, RefTypeParser
|
from jambo.parser import ObjectTypeParser, RefTypeParser
|
||||||
from jambo.types import JSONSchema
|
from jambo.types import JSONSchema, RefCacheDict
|
||||||
|
|
||||||
from jsonschema.exceptions import SchemaError
|
from jsonschema.exceptions import SchemaError
|
||||||
from jsonschema.validators import validator_for
|
from jsonschema.validators import validator_for
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
from typing_extensions import MutableMapping, Optional
|
||||||
|
|
||||||
|
|
||||||
class SchemaConverter:
|
class SchemaConverter:
|
||||||
@@ -16,13 +17,56 @@ class SchemaConverter:
|
|||||||
fields and types. The generated model can be used for data validation and serialization.
|
fields and types. The generated model can be used for data validation and serialization.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@staticmethod
|
_namespace_registry: MutableMapping[str, RefCacheDict]
|
||||||
def build(schema: JSONSchema) -> type[BaseModel]:
|
|
||||||
|
def __init__(
|
||||||
|
self, namespace_registry: Optional[MutableMapping[str, RefCacheDict]] = None
|
||||||
|
) -> None:
|
||||||
|
if namespace_registry is None:
|
||||||
|
namespace_registry = dict()
|
||||||
|
self._namespace_registry = namespace_registry
|
||||||
|
|
||||||
|
def build_with_cache(
|
||||||
|
self,
|
||||||
|
schema: JSONSchema,
|
||||||
|
ref_cache: Optional[RefCacheDict] = None,
|
||||||
|
without_cache: bool = False,
|
||||||
|
) -> type[BaseModel]:
|
||||||
"""
|
"""
|
||||||
Converts a JSON Schema to a Pydantic model.
|
Converts a JSON Schema to a Pydantic model.
|
||||||
:param schema: The JSON Schema to convert.
|
This is the instance method version of `build` and uses the instance's reference cache if none is provided.
|
||||||
:return: A Pydantic model class.
|
Use this method if you want to utilize the instance's reference cache.
|
||||||
|
|
||||||
|
:param schema: The JSON Schema to convert.
|
||||||
|
:param ref_cache: An optional reference cache to use during conversion.
|
||||||
|
:param without_cache: Whether to use a clean reference cache for this conversion.
|
||||||
|
:return: The generated Pydantic model.
|
||||||
"""
|
"""
|
||||||
|
local_ref_cache: RefCacheDict
|
||||||
|
|
||||||
|
if without_cache:
|
||||||
|
local_ref_cache = dict()
|
||||||
|
elif ref_cache is None:
|
||||||
|
namespace = schema.get("$id", "default")
|
||||||
|
local_ref_cache = self._namespace_registry.setdefault(namespace, dict())
|
||||||
|
else:
|
||||||
|
local_ref_cache = ref_cache
|
||||||
|
|
||||||
|
return self.build(schema, local_ref_cache)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def build(
|
||||||
|
schema: JSONSchema, ref_cache: Optional[RefCacheDict] = None
|
||||||
|
) -> type[BaseModel]:
|
||||||
|
"""
|
||||||
|
Converts a JSON Schema to a Pydantic model.
|
||||||
|
This method doesn't use a reference cache if none is provided.
|
||||||
|
:param schema: The JSON Schema to convert.
|
||||||
|
:param ref_cache: An optional reference cache to use during conversion, if provided `with_clean_cache` will be ignored.
|
||||||
|
:return: The generated Pydantic model.
|
||||||
|
"""
|
||||||
|
if ref_cache is None:
|
||||||
|
ref_cache = dict()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
validator = validator_for(schema)
|
validator = validator_for(schema)
|
||||||
@@ -45,8 +89,9 @@ class SchemaConverter:
|
|||||||
schema["title"],
|
schema["title"],
|
||||||
schema.get("properties", {}),
|
schema.get("properties", {}),
|
||||||
schema.get("required", []),
|
schema.get("required", []),
|
||||||
|
description=schema.get("description"),
|
||||||
context=schema,
|
context=schema,
|
||||||
ref_cache=dict(),
|
ref_cache=ref_cache,
|
||||||
required=True,
|
required=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -55,7 +100,7 @@ class SchemaConverter:
|
|||||||
schema["title"],
|
schema["title"],
|
||||||
schema,
|
schema,
|
||||||
context=schema,
|
context=schema,
|
||||||
ref_cache=dict(),
|
ref_cache=ref_cache,
|
||||||
required=True,
|
required=True,
|
||||||
)
|
)
|
||||||
return parsed_model
|
return parsed_model
|
||||||
@@ -68,6 +113,32 @@ class SchemaConverter:
|
|||||||
unsupported_field=unsupported_type,
|
unsupported_field=unsupported_type,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def clear_ref_cache(self, namespace: Optional[str] = "default") -> None:
|
||||||
|
"""
|
||||||
|
Clears the reference cache.
|
||||||
|
"""
|
||||||
|
if namespace is None:
|
||||||
|
self._namespace_registry.clear()
|
||||||
|
return
|
||||||
|
|
||||||
|
if namespace in self._namespace_registry:
|
||||||
|
self._namespace_registry[namespace].clear()
|
||||||
|
|
||||||
|
def get_cached_ref(
|
||||||
|
self, ref_name: str, namespace: str = "default"
|
||||||
|
) -> Optional[type]:
|
||||||
|
"""
|
||||||
|
Gets a cached reference from the reference cache.
|
||||||
|
:param ref_name: The name of the reference to get.
|
||||||
|
:return: The cached reference, or None if not found.
|
||||||
|
"""
|
||||||
|
cached_type = self._namespace_registry.get(namespace, {}).get(ref_name)
|
||||||
|
|
||||||
|
if isinstance(cached_type, type):
|
||||||
|
return cached_type
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _get_schema_type(schema: JSONSchema) -> str | None:
|
def _get_schema_type(schema: JSONSchema) -> str | None:
|
||||||
"""
|
"""
|
||||||
@@ -78,4 +149,11 @@ class SchemaConverter:
|
|||||||
if "$ref" in schema:
|
if "$ref" in schema:
|
||||||
return "$ref"
|
return "$ref"
|
||||||
|
|
||||||
return schema.get("type")
|
type_value = schema.get("type")
|
||||||
|
if isinstance(type_value, list):
|
||||||
|
raise InvalidSchemaException(
|
||||||
|
"Invalid schema: 'type' cannot be a list at the top level",
|
||||||
|
invalid_field=str(schema),
|
||||||
|
)
|
||||||
|
|
||||||
|
return type_value
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ from .json_schema_type import (
|
|||||||
JSONSchemaType,
|
JSONSchemaType,
|
||||||
JSONType,
|
JSONType,
|
||||||
)
|
)
|
||||||
from .type_parser_options import TypeParserOptions
|
from .type_parser_options import RefCacheDict, TypeParserOptions
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
@@ -12,5 +12,6 @@ __all__ = [
|
|||||||
"JSONSchemaNativeTypes",
|
"JSONSchemaNativeTypes",
|
||||||
"JSONType",
|
"JSONType",
|
||||||
"JSONSchema",
|
"JSONSchema",
|
||||||
|
"RefCacheDict",
|
||||||
"TypeParserOptions",
|
"TypeParserOptions",
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -42,7 +42,7 @@ JSONSchema = TypedDict(
|
|||||||
"description": str,
|
"description": str,
|
||||||
"default": JSONType,
|
"default": JSONType,
|
||||||
"examples": List[JSONType],
|
"examples": List[JSONType],
|
||||||
"type": JSONSchemaType,
|
"type": JSONSchemaType | List[JSONSchemaType],
|
||||||
"enum": List[JSONType],
|
"enum": List[JSONType],
|
||||||
"const": JSONType,
|
"const": JSONType,
|
||||||
"properties": Dict[str, "JSONSchema"],
|
"properties": Dict[str, "JSONSchema"],
|
||||||
|
|||||||
@@ -1,9 +1,12 @@
|
|||||||
from jambo.types.json_schema_type import JSONSchema
|
from jambo.types.json_schema_type import JSONSchema
|
||||||
|
|
||||||
from typing_extensions import ForwardRef, TypedDict
|
from typing_extensions import ForwardRef, MutableMapping, TypedDict
|
||||||
|
|
||||||
|
|
||||||
|
RefCacheDict = MutableMapping[str, ForwardRef | type | None]
|
||||||
|
|
||||||
|
|
||||||
class TypeParserOptions(TypedDict):
|
class TypeParserOptions(TypedDict):
|
||||||
required: bool
|
required: bool
|
||||||
context: JSONSchema
|
context: JSONSchema
|
||||||
ref_cache: dict[str, ForwardRef | type | None]
|
ref_cache: RefCacheDict
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ classifiers = [
|
|||||||
"Programming Language :: Python :: 3.11",
|
"Programming Language :: Python :: 3.11",
|
||||||
"Programming Language :: Python :: 3.12",
|
"Programming Language :: Python :: 3.12",
|
||||||
"Programming Language :: Python :: 3.13",
|
"Programming Language :: Python :: 3.13",
|
||||||
|
"Programming Language :: Python :: 3.14",
|
||||||
]
|
]
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
@@ -25,7 +26,7 @@ readme = "README.md"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"email-validator>=2.2.0",
|
"email-validator>=2.2.0",
|
||||||
"jsonschema>=4.23.0",
|
"jsonschema>=4.23.0",
|
||||||
"pydantic>=2.10.6",
|
"pydantic>=2.12.4",
|
||||||
]
|
]
|
||||||
|
|
||||||
[dependency-groups]
|
[dependency-groups]
|
||||||
@@ -37,6 +38,7 @@ dev = [
|
|||||||
"ruff>=0.11.4",
|
"ruff>=0.11.4",
|
||||||
"sphinx>=8.1.3",
|
"sphinx>=8.1.3",
|
||||||
"sphinx-autobuild>=2024.10.3",
|
"sphinx-autobuild>=2024.10.3",
|
||||||
|
"sphinx-autodoc-typehints>=3.0.1",
|
||||||
"sphinx-rtd-theme>=3.0.2",
|
"sphinx-rtd-theme>=3.0.2",
|
||||||
"types-jsonschema>=4.25.1.20250822",
|
"types-jsonschema>=4.25.1.20250822",
|
||||||
]
|
]
|
||||||
@@ -86,3 +88,8 @@ section-order=[
|
|||||||
"standard-library",
|
"standard-library",
|
||||||
]
|
]
|
||||||
lines-after-imports = 2
|
lines-after-imports = 2
|
||||||
|
|
||||||
|
|
||||||
|
[tool.pyright]
|
||||||
|
venvPath = "."
|
||||||
|
venv = ".venv"
|
||||||
|
|||||||
@@ -42,7 +42,7 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = AllOfTypeParser().from_properties(
|
type_parsing, type_validator = AllOfTypeParser().from_properties(
|
||||||
"placeholder", properties
|
"placeholder", properties, ref_cache={}
|
||||||
)
|
)
|
||||||
|
|
||||||
with self.assertRaises(ValidationError):
|
with self.assertRaises(ValidationError):
|
||||||
@@ -87,7 +87,7 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = AllOfTypeParser().from_properties(
|
type_parsing, type_validator = AllOfTypeParser().from_properties(
|
||||||
"placeholder", properties
|
"placeholder", properties, ref_cache={}
|
||||||
)
|
)
|
||||||
|
|
||||||
with self.assertRaises(ValidationError):
|
with self.assertRaises(ValidationError):
|
||||||
@@ -116,7 +116,7 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = AllOfTypeParser().from_properties(
|
type_parsing, type_validator = AllOfTypeParser().from_properties(
|
||||||
"placeholder", properties
|
"placeholder", properties, ref_cache={}
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, str)
|
self.assertEqual(type_parsing, str)
|
||||||
@@ -137,7 +137,7 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = AllOfTypeParser().from_properties(
|
type_parsing, type_validator = AllOfTypeParser().from_properties(
|
||||||
"placeholder", properties
|
"placeholder", properties, ref_cache={}
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, str)
|
self.assertEqual(type_parsing, str)
|
||||||
@@ -158,7 +158,7 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
AllOfTypeParser().from_properties("placeholder", properties)
|
AllOfTypeParser().from_properties("placeholder", properties, ref_cache={})
|
||||||
|
|
||||||
def test_all_of_invalid_type_not_present(self):
|
def test_all_of_invalid_type_not_present(self):
|
||||||
properties = {
|
properties = {
|
||||||
@@ -171,7 +171,7 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
AllOfTypeParser().from_properties("placeholder", properties)
|
AllOfTypeParser().from_properties("placeholder", properties, ref_cache={})
|
||||||
|
|
||||||
def test_all_of_invalid_type_in_fields(self):
|
def test_all_of_invalid_type_in_fields(self):
|
||||||
properties = {
|
properties = {
|
||||||
@@ -184,7 +184,7 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
AllOfTypeParser().from_properties("placeholder", properties)
|
AllOfTypeParser().from_properties("placeholder", properties, ref_cache={})
|
||||||
|
|
||||||
def test_all_of_invalid_type_not_all_equal(self):
|
def test_all_of_invalid_type_not_all_equal(self):
|
||||||
"""
|
"""
|
||||||
@@ -200,7 +200,7 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
AllOfTypeParser().from_properties("placeholder", properties)
|
AllOfTypeParser().from_properties("placeholder", properties, ref_cache={})
|
||||||
|
|
||||||
def test_all_of_description_field(self):
|
def test_all_of_description_field(self):
|
||||||
"""
|
"""
|
||||||
@@ -237,7 +237,9 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, _ = AllOfTypeParser().from_properties("placeholder", properties)
|
type_parsing, _ = AllOfTypeParser().from_properties(
|
||||||
|
"placeholder", properties, ref_cache={}
|
||||||
|
)
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
type_parsing.model_json_schema()["properties"]["name"]["description"],
|
type_parsing.model_json_schema()["properties"]["name"]["description"],
|
||||||
@@ -275,7 +277,9 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, _ = AllOfTypeParser().from_properties("placeholder", properties)
|
type_parsing, _ = AllOfTypeParser().from_properties(
|
||||||
|
"placeholder", properties, ref_cache={}
|
||||||
|
)
|
||||||
obj = type_parsing()
|
obj = type_parsing()
|
||||||
self.assertEqual(obj.name, "John")
|
self.assertEqual(obj.name, "John")
|
||||||
self.assertEqual(obj.age, 30)
|
self.assertEqual(obj.age, 30)
|
||||||
@@ -308,4 +312,51 @@ class TestAllOfTypeParser(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
AllOfTypeParser().from_properties("placeholder", properties)
|
AllOfTypeParser().from_properties("placeholder", properties, ref_cache={})
|
||||||
|
|
||||||
|
def test_all_of_with_root_examples(self):
|
||||||
|
"""
|
||||||
|
Tests the AllOfTypeParser with examples.
|
||||||
|
"""
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"type": "object",
|
||||||
|
"allOf": [
|
||||||
|
{
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"minLength": 1,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string",
|
||||||
|
"maxLength": 4,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"examples": [
|
||||||
|
{"name": "John"},
|
||||||
|
{"name": "Jane"},
|
||||||
|
{"name": "Doe"},
|
||||||
|
{"name": "Jack"},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
type_parsed, type_properties = AllOfTypeParser().from_properties(
|
||||||
|
"placeholder", properties, ref_cache={}
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
type_properties["examples"],
|
||||||
|
[
|
||||||
|
type_parsed(name="John"),
|
||||||
|
type_parsed(name="Jane"),
|
||||||
|
type_parsed(name="Doe"),
|
||||||
|
type_parsed(name="Jack"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|||||||
@@ -98,3 +98,46 @@ class TestAnyOfTypeParser(TestCase):
|
|||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
AnyOfTypeParser().from_properties("placeholder", properties)
|
AnyOfTypeParser().from_properties("placeholder", properties)
|
||||||
|
|
||||||
|
def test_anyof_with_examples(self):
|
||||||
|
"""
|
||||||
|
Tests the AnyOfTypeParser with a string or int type and examples.
|
||||||
|
"""
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"examples": ["example string"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "integer",
|
||||||
|
"examples": [123],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
parsed_type, _ = AnyOfTypeParser().from_properties("placeholder", properties)
|
||||||
|
|
||||||
|
type_1, type_2 = get_args(parsed_type)
|
||||||
|
|
||||||
|
self.assertEqual(get_args(type_1)[1].examples, ["example string"])
|
||||||
|
|
||||||
|
self.assertEqual(get_args(type_2)[1].examples, [123])
|
||||||
|
|
||||||
|
def test_any_of_with_root_examples(self):
|
||||||
|
"""
|
||||||
|
Tests the AnyOfTypeParser with a string or int type and examples.
|
||||||
|
"""
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"anyOf": [
|
||||||
|
{"type": "string"},
|
||||||
|
{"type": "integer"},
|
||||||
|
],
|
||||||
|
"examples": ["100", 100],
|
||||||
|
}
|
||||||
|
|
||||||
|
_, type_validator = AnyOfTypeParser().from_properties("placeholder", properties)
|
||||||
|
|
||||||
|
self.assertEqual(type_validator["examples"], ["100", 100])
|
||||||
|
|||||||
@@ -109,3 +109,19 @@ class TestArrayTypeParser(TestCase):
|
|||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
parser.from_properties("placeholder", properties)
|
parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
|
def test_array_parser_with_examples(self):
|
||||||
|
parser = ArrayTypeParser()
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"items": {"type": "integer"},
|
||||||
|
"examples": [
|
||||||
|
[1, 2, 3],
|
||||||
|
[4, 5, 6],
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
|
self.assertEqual(type_parsing.__origin__, list)
|
||||||
|
self.assertEqual(type_validator["examples"], [[1, 2, 3], [4, 5, 6]])
|
||||||
|
|||||||
@@ -42,3 +42,19 @@ class TestBoolTypeParser(TestCase):
|
|||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
parser.from_properties_impl("placeholder", properties)
|
parser.from_properties_impl("placeholder", properties)
|
||||||
|
|
||||||
|
def test_bool_parser_with_examples(self):
|
||||||
|
parser = BooleanTypeParser()
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"type": "boolean",
|
||||||
|
"examples": [True, False],
|
||||||
|
}
|
||||||
|
|
||||||
|
type_parsing, type_validator = parser.from_properties_impl(
|
||||||
|
"placeholder", properties
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(type_parsing, bool)
|
||||||
|
self.assertEqual(type_validator["default"], None)
|
||||||
|
self.assertEqual(type_validator["examples"], [True, False])
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ class TestConstTypeParser(TestCase):
|
|||||||
parser = ConstTypeParser()
|
parser = ConstTypeParser()
|
||||||
|
|
||||||
expected_const_value = "United States of America"
|
expected_const_value = "United States of America"
|
||||||
properties = {"const": expected_const_value}
|
properties = {"const": expected_const_value, "examples": [expected_const_value]}
|
||||||
|
|
||||||
parsed_type, parsed_properties = parser.from_properties_impl(
|
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||||
"country", properties
|
"country", properties
|
||||||
@@ -23,13 +23,14 @@ class TestConstTypeParser(TestCase):
|
|||||||
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
||||||
|
|
||||||
self.assertEqual(parsed_properties["default"], expected_const_value)
|
self.assertEqual(parsed_properties["default"], expected_const_value)
|
||||||
|
self.assertEqual(parsed_properties["examples"], [expected_const_value])
|
||||||
|
|
||||||
def test_const_type_parser_non_hashable_value(self):
|
def test_const_type_parser_non_hashable_value(self):
|
||||||
"""Test const parser with non-hashable values (uses Annotated with validator)"""
|
"""Test const parser with non-hashable values (uses Annotated with validator)"""
|
||||||
parser = ConstTypeParser()
|
parser = ConstTypeParser()
|
||||||
|
|
||||||
expected_const_value = [1, 2, 3] # Lists are not hashable
|
expected_const_value = [1, 2, 3] # Lists are not hashable
|
||||||
properties = {"const": expected_const_value}
|
properties = {"const": expected_const_value, "examples": [expected_const_value]}
|
||||||
|
|
||||||
parsed_type, parsed_properties = parser.from_properties_impl(
|
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||||
"list_const", properties
|
"list_const", properties
|
||||||
@@ -40,13 +41,14 @@ class TestConstTypeParser(TestCase):
|
|||||||
self.assertIn(list, get_args(parsed_type))
|
self.assertIn(list, get_args(parsed_type))
|
||||||
|
|
||||||
self.assertEqual(parsed_properties["default"], expected_const_value)
|
self.assertEqual(parsed_properties["default"], expected_const_value)
|
||||||
|
self.assertEqual(parsed_properties["examples"], [expected_const_value])
|
||||||
|
|
||||||
def test_const_type_parser_integer_value(self):
|
def test_const_type_parser_integer_value(self):
|
||||||
"""Test const parser with integer values (uses Literal)"""
|
"""Test const parser with integer values (uses Literal)"""
|
||||||
parser = ConstTypeParser()
|
parser = ConstTypeParser()
|
||||||
|
|
||||||
expected_const_value = 42
|
expected_const_value = 42
|
||||||
properties = {"const": expected_const_value}
|
properties = {"const": expected_const_value, "examples": [expected_const_value]}
|
||||||
|
|
||||||
parsed_type, parsed_properties = parser.from_properties_impl(
|
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||||
"int_const", properties
|
"int_const", properties
|
||||||
@@ -57,13 +59,14 @@ class TestConstTypeParser(TestCase):
|
|||||||
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
||||||
|
|
||||||
self.assertEqual(parsed_properties["default"], expected_const_value)
|
self.assertEqual(parsed_properties["default"], expected_const_value)
|
||||||
|
self.assertEqual(parsed_properties["examples"], [expected_const_value])
|
||||||
|
|
||||||
def test_const_type_parser_boolean_value(self):
|
def test_const_type_parser_boolean_value(self):
|
||||||
"""Test const parser with boolean values (uses Literal)"""
|
"""Test const parser with boolean values (uses Literal)"""
|
||||||
parser = ConstTypeParser()
|
parser = ConstTypeParser()
|
||||||
|
|
||||||
expected_const_value = True
|
expected_const_value = True
|
||||||
properties = {"const": expected_const_value}
|
properties = {"const": expected_const_value, "examples": [expected_const_value]}
|
||||||
|
|
||||||
parsed_type, parsed_properties = parser.from_properties_impl(
|
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||||
"bool_const", properties
|
"bool_const", properties
|
||||||
@@ -74,6 +77,7 @@ class TestConstTypeParser(TestCase):
|
|||||||
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
self.assertEqual(get_args(parsed_type), (expected_const_value,))
|
||||||
|
|
||||||
self.assertEqual(parsed_properties["default"], expected_const_value)
|
self.assertEqual(parsed_properties["default"], expected_const_value)
|
||||||
|
self.assertEqual(parsed_properties["examples"], [expected_const_value])
|
||||||
|
|
||||||
def test_const_type_parser_invalid_properties(self):
|
def test_const_type_parser_invalid_properties(self):
|
||||||
parser = ConstTypeParser()
|
parser = ConstTypeParser()
|
||||||
|
|||||||
@@ -49,6 +49,20 @@ class TestEnumTypeParser(TestCase):
|
|||||||
)
|
)
|
||||||
self.assertEqual(parsed_properties, {"default": None})
|
self.assertEqual(parsed_properties, {"default": None})
|
||||||
|
|
||||||
|
def test_enum_type_parser_creates_enum_with_description(self):
|
||||||
|
parser = EnumTypeParser()
|
||||||
|
|
||||||
|
schema = {
|
||||||
|
"description": "an enum",
|
||||||
|
"enum": ["value1"],
|
||||||
|
}
|
||||||
|
|
||||||
|
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||||
|
"TestEnum",
|
||||||
|
schema,
|
||||||
|
)
|
||||||
|
self.assertEqual(parsed_type.__doc__, "an enum")
|
||||||
|
|
||||||
def test_enum_type_parser_creates_enum_with_default(self):
|
def test_enum_type_parser_creates_enum_with_default(self):
|
||||||
parser = EnumTypeParser()
|
parser = EnumTypeParser()
|
||||||
|
|
||||||
@@ -89,3 +103,27 @@ class TestEnumTypeParser(TestCase):
|
|||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
parser.from_properties_impl("TestEnum", schema)
|
parser.from_properties_impl("TestEnum", schema)
|
||||||
|
|
||||||
|
def test_enum_type_parser_creates_enum_with_examples(self):
|
||||||
|
parser = EnumTypeParser()
|
||||||
|
|
||||||
|
schema = {
|
||||||
|
"enum": ["value1", "value2", "value3"],
|
||||||
|
"examples": ["value1", "value3"],
|
||||||
|
}
|
||||||
|
|
||||||
|
parsed_type, parsed_properties = parser.from_properties_impl(
|
||||||
|
"TestEnum",
|
||||||
|
schema,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertIsInstance(parsed_type, type)
|
||||||
|
self.assertTrue(issubclass(parsed_type, Enum))
|
||||||
|
self.assertEqual(
|
||||||
|
set(parsed_type.__members__.keys()), {"VALUE1", "VALUE2", "VALUE3"}
|
||||||
|
)
|
||||||
|
self.assertEqual(parsed_properties["default"], None)
|
||||||
|
self.assertEqual(
|
||||||
|
parsed_properties["examples"],
|
||||||
|
[getattr(parsed_type, "VALUE1"), getattr(parsed_type, "VALUE3")],
|
||||||
|
)
|
||||||
|
|||||||
@@ -23,6 +23,7 @@ class TestFloatTypeParser(TestCase):
|
|||||||
"maximum": 10.5,
|
"maximum": 10.5,
|
||||||
"minimum": 1.0,
|
"minimum": 1.0,
|
||||||
"multipleOf": 0.5,
|
"multipleOf": 0.5,
|
||||||
|
"examples": [1.5, 2.5],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
@@ -31,6 +32,7 @@ class TestFloatTypeParser(TestCase):
|
|||||||
self.assertEqual(type_validator["le"], 10.5)
|
self.assertEqual(type_validator["le"], 10.5)
|
||||||
self.assertEqual(type_validator["ge"], 1.0)
|
self.assertEqual(type_validator["ge"], 1.0)
|
||||||
self.assertEqual(type_validator["multiple_of"], 0.5)
|
self.assertEqual(type_validator["multiple_of"], 0.5)
|
||||||
|
self.assertEqual(type_validator["examples"], [1.5, 2.5])
|
||||||
|
|
||||||
def test_float_parser_with_default(self):
|
def test_float_parser_with_default(self):
|
||||||
parser = FloatTypeParser()
|
parser = FloatTypeParser()
|
||||||
|
|||||||
@@ -23,6 +23,7 @@ class TestIntTypeParser(TestCase):
|
|||||||
"maximum": 10,
|
"maximum": 10,
|
||||||
"minimum": 1,
|
"minimum": 1,
|
||||||
"multipleOf": 2,
|
"multipleOf": 2,
|
||||||
|
"examples": [2, 4],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
@@ -31,6 +32,7 @@ class TestIntTypeParser(TestCase):
|
|||||||
self.assertEqual(type_validator["le"], 10)
|
self.assertEqual(type_validator["le"], 10)
|
||||||
self.assertEqual(type_validator["ge"], 1)
|
self.assertEqual(type_validator["ge"], 1)
|
||||||
self.assertEqual(type_validator["multiple_of"], 2)
|
self.assertEqual(type_validator["multiple_of"], 2)
|
||||||
|
self.assertEqual(type_validator["examples"], [2, 4])
|
||||||
|
|
||||||
def test_int_parser_with_default(self):
|
def test_int_parser_with_default(self):
|
||||||
parser = IntTypeParser()
|
parser = IntTypeParser()
|
||||||
|
|||||||
@@ -16,6 +16,22 @@ class TestNullTypeParser(TestCase):
|
|||||||
self.assertEqual(type_parsing, type(None))
|
self.assertEqual(type_parsing, type(None))
|
||||||
self.assertEqual(type_validator, {"default": None})
|
self.assertEqual(type_validator, {"default": None})
|
||||||
|
|
||||||
|
def test_null_parser_with_examples(self):
|
||||||
|
parser = NullTypeParser()
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"type": "null",
|
||||||
|
"examples": [None],
|
||||||
|
}
|
||||||
|
|
||||||
|
type_parsing, type_validator = parser.from_properties_impl(
|
||||||
|
"placeholder", properties
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(type_parsing, type(None))
|
||||||
|
self.assertEqual(type_validator["default"], None)
|
||||||
|
self.assertEqual(type_validator["examples"], [None])
|
||||||
|
|
||||||
def test_null_parser_with_invalid_default(self):
|
def test_null_parser_with_invalid_default(self):
|
||||||
parser = NullTypeParser()
|
parser = NullTypeParser()
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
|
from jambo.exceptions import InternalAssertionException
|
||||||
from jambo.parser import ObjectTypeParser
|
from jambo.parser import ObjectTypeParser
|
||||||
|
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
|
|
||||||
|
|
||||||
class TestObjectTypeParser(TestCase):
|
class TestObjectTypeParser(TestCase):
|
||||||
def test_object_type_parser(self):
|
def test_object_type_parser_throws_without_ref_cache(self):
|
||||||
parser = ObjectTypeParser()
|
parser = ObjectTypeParser()
|
||||||
|
|
||||||
properties = {
|
properties = {
|
||||||
@@ -15,13 +16,57 @@ class TestObjectTypeParser(TestCase):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
Model, _args = parser.from_properties_impl("placeholder", properties)
|
with self.assertRaises(InternalAssertionException):
|
||||||
|
parser.from_properties_impl("placeholder", properties)
|
||||||
|
|
||||||
|
def test_object_type_parser(self):
|
||||||
|
parser = ObjectTypeParser()
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"type": "object",
|
||||||
|
"description": "obj desc",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
Model, _args = parser.from_properties_impl(
|
||||||
|
"placeholder", properties, ref_cache={}
|
||||||
|
)
|
||||||
|
self.assertEqual(Model.__doc__, "obj desc")
|
||||||
|
|
||||||
obj = Model(name="name", age=10)
|
obj = Model(name="name", age=10)
|
||||||
|
|
||||||
self.assertEqual(obj.name, "name")
|
self.assertEqual(obj.name, "name")
|
||||||
self.assertEqual(obj.age, 10)
|
self.assertEqual(obj.age, 10)
|
||||||
|
|
||||||
|
def test_object_type_parser_with_object_example(self):
|
||||||
|
parser = ObjectTypeParser()
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{
|
||||||
|
"name": "example_name",
|
||||||
|
"age": 30,
|
||||||
|
}
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
_, type_validator = parser.from_properties_impl(
|
||||||
|
"placeholder", properties, ref_cache={}
|
||||||
|
)
|
||||||
|
|
||||||
|
test_example = type_validator["examples"][0]
|
||||||
|
|
||||||
|
self.assertEqual(test_example.name, "example_name")
|
||||||
|
self.assertEqual(test_example.age, 30)
|
||||||
|
|
||||||
def test_object_type_parser_with_default(self):
|
def test_object_type_parser_with_default(self):
|
||||||
parser = ObjectTypeParser()
|
parser = ObjectTypeParser()
|
||||||
|
|
||||||
@@ -37,7 +82,9 @@ class TestObjectTypeParser(TestCase):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
_, type_validator = parser.from_properties_impl("placeholder", properties)
|
_, type_validator = parser.from_properties_impl(
|
||||||
|
"placeholder", properties, ref_cache={}
|
||||||
|
)
|
||||||
|
|
||||||
# Check default value
|
# Check default value
|
||||||
default_obj = type_validator["default_factory"]()
|
default_obj = type_validator["default_factory"]()
|
||||||
@@ -47,3 +94,18 @@ class TestObjectTypeParser(TestCase):
|
|||||||
# Chekc default factory new object id
|
# Chekc default factory new object id
|
||||||
new_obj = type_validator["default_factory"]()
|
new_obj = type_validator["default_factory"]()
|
||||||
self.assertNotEqual(id(default_obj), id(new_obj))
|
self.assertNotEqual(id(default_obj), id(new_obj))
|
||||||
|
|
||||||
|
def test_object_type_parser_warns_if_object_override_in_cache(self):
|
||||||
|
ref_cache = {}
|
||||||
|
|
||||||
|
parser = ObjectTypeParser()
|
||||||
|
|
||||||
|
properties = {"type": "object", "properties": {}}
|
||||||
|
|
||||||
|
with self.assertWarns(UserWarning):
|
||||||
|
_, type_validator = parser.from_properties_impl(
|
||||||
|
"placeholder", properties, ref_cache=ref_cache
|
||||||
|
)
|
||||||
|
_, type_validator = parser.from_properties_impl(
|
||||||
|
"placeholder", properties, ref_cache=ref_cache
|
||||||
|
)
|
||||||
|
|||||||
@@ -532,3 +532,71 @@ class TestOneOfTypeParser(TestCase):
|
|||||||
# Invalid: Wrong properties for the type
|
# Invalid: Wrong properties for the type
|
||||||
with self.assertRaises(ValidationError):
|
with self.assertRaises(ValidationError):
|
||||||
Model(shape={"type": "circle", "width": 10})
|
Model(shape={"type": "circle", "width": 10})
|
||||||
|
|
||||||
|
def test_oneof_with_examples(self):
|
||||||
|
schema = {
|
||||||
|
"title": "ExampleTest",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"value": {
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"examples": ["example1", "example2"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "integer",
|
||||||
|
"examples": [1, 2, 3],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["value"],
|
||||||
|
}
|
||||||
|
|
||||||
|
Model = SchemaConverter.build(schema)
|
||||||
|
|
||||||
|
# Since Pydantic does not natively support oneOf and the validation
|
||||||
|
# is done via a custom validator, the `value` is represented using `anyOf`
|
||||||
|
model_schema = Model.model_json_schema()
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
model_schema["properties"]["value"]["anyOf"][0]["examples"],
|
||||||
|
["example1", "example2"],
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
model_schema["properties"]["value"]["anyOf"][1]["examples"],
|
||||||
|
[1, 2, 3],
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_oneof_with_root_examples(self):
|
||||||
|
schema = {
|
||||||
|
"title": "ExampleTest",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"value": {
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "integer",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"examples": ["example1", 2],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"required": ["value"],
|
||||||
|
}
|
||||||
|
|
||||||
|
Model = SchemaConverter.build(schema)
|
||||||
|
|
||||||
|
# Since Pydantic does not natively support oneOf and the validation
|
||||||
|
# is done via a custom validator, the `value` is represented using `anyOf`
|
||||||
|
model_schema = Model.model_json_schema()
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
model_schema["properties"]["value"]["examples"],
|
||||||
|
["example1", 2],
|
||||||
|
)
|
||||||
|
|||||||
@@ -2,8 +2,8 @@ from jambo.exceptions import InternalAssertionException, InvalidSchemaException
|
|||||||
from jambo.parser import ObjectTypeParser, RefTypeParser
|
from jambo.parser import ObjectTypeParser, RefTypeParser
|
||||||
|
|
||||||
from pydantic import ValidationError
|
from pydantic import ValidationError
|
||||||
|
from typing_extensions import ForwardRef
|
||||||
|
|
||||||
from typing import ForwardRef
|
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
|
|
||||||
|
|
||||||
@@ -485,3 +485,38 @@ class TestRefTypeParser(TestCase):
|
|||||||
|
|
||||||
self.assertEqual(obj.name, "John")
|
self.assertEqual(obj.name, "John")
|
||||||
self.assertEqual(obj.age, 30)
|
self.assertEqual(obj.age, 30)
|
||||||
|
|
||||||
|
def test_ref_type_parser_with_def_with_examples(self):
|
||||||
|
properties = {
|
||||||
|
"title": "person",
|
||||||
|
"$ref": "#/$defs/person",
|
||||||
|
"$defs": {
|
||||||
|
"person": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"examples": [
|
||||||
|
{"name": "John", "age": 30},
|
||||||
|
{"name": "Jane", "age": 25},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
_, type_validator = RefTypeParser().from_properties(
|
||||||
|
"person",
|
||||||
|
properties,
|
||||||
|
context=properties,
|
||||||
|
ref_cache={},
|
||||||
|
required=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
type_validator.get("examples"),
|
||||||
|
[
|
||||||
|
{"name": "John", "age": 30},
|
||||||
|
{"name": "Jane", "age": 25},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|||||||
@@ -3,8 +3,8 @@ from jambo.parser import StringTypeParser
|
|||||||
|
|
||||||
from pydantic import AnyUrl, EmailStr
|
from pydantic import AnyUrl, EmailStr
|
||||||
|
|
||||||
from datetime import date, datetime, time, timedelta
|
from datetime import date, datetime, time, timedelta, timezone
|
||||||
from ipaddress import IPv4Address, IPv6Address
|
from ipaddress import IPv4Address, IPv6Address, ip_address
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
from uuid import UUID
|
from uuid import UUID
|
||||||
|
|
||||||
@@ -27,6 +27,7 @@ class TestStringTypeParser(TestCase):
|
|||||||
"maxLength": 10,
|
"maxLength": 10,
|
||||||
"minLength": 1,
|
"minLength": 1,
|
||||||
"pattern": "^[a-zA-Z]+$",
|
"pattern": "^[a-zA-Z]+$",
|
||||||
|
"examples": ["test", "TEST"],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
@@ -35,6 +36,7 @@ class TestStringTypeParser(TestCase):
|
|||||||
self.assertEqual(type_validator["max_length"], 10)
|
self.assertEqual(type_validator["max_length"], 10)
|
||||||
self.assertEqual(type_validator["min_length"], 1)
|
self.assertEqual(type_validator["min_length"], 1)
|
||||||
self.assertEqual(type_validator["pattern"], "^[a-zA-Z]+$")
|
self.assertEqual(type_validator["pattern"], "^[a-zA-Z]+$")
|
||||||
|
self.assertEqual(type_validator["examples"], ["test", "TEST"])
|
||||||
|
|
||||||
def test_string_parser_with_default_value(self):
|
def test_string_parser_with_default_value(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
@@ -98,11 +100,13 @@ class TestStringTypeParser(TestCase):
|
|||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": "email",
|
"format": "email",
|
||||||
|
"examples": ["test@example.com"],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, EmailStr)
|
self.assertEqual(type_parsing, EmailStr)
|
||||||
|
self.assertEqual(type_validator["examples"], ["test@example.com"])
|
||||||
|
|
||||||
def test_string_parser_with_uri_format(self):
|
def test_string_parser_with_uri_format(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
@@ -110,21 +114,27 @@ class TestStringTypeParser(TestCase):
|
|||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": "uri",
|
"format": "uri",
|
||||||
|
"examples": ["test://domain/resource"],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, AnyUrl)
|
self.assertEqual(type_parsing, AnyUrl)
|
||||||
|
self.assertEqual(type_validator["examples"], [AnyUrl("test://domain/resource")])
|
||||||
|
|
||||||
def test_string_parser_with_ip_formats(self):
|
def test_string_parser_with_ip_formats(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
|
|
||||||
formats = {"ipv4": IPv4Address, "ipv6": IPv6Address}
|
formats = {"ipv4": IPv4Address, "ipv6": IPv6Address}
|
||||||
|
examples = {"ipv4": ["192.168.1.1"], "ipv6": ["::1"]}
|
||||||
|
|
||||||
for ip_format, expected_type in formats.items():
|
for ip_format, expected_type in formats.items():
|
||||||
|
example = examples[ip_format]
|
||||||
|
|
||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": ip_format,
|
"format": ip_format,
|
||||||
|
"examples": example,
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties(
|
type_parsing, type_validator = parser.from_properties(
|
||||||
@@ -132,6 +142,9 @@ class TestStringTypeParser(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, expected_type)
|
self.assertEqual(type_parsing, expected_type)
|
||||||
|
self.assertEqual(
|
||||||
|
type_validator["examples"], [ip_address(e) for e in example]
|
||||||
|
)
|
||||||
|
|
||||||
def test_string_parser_with_uuid_format(self):
|
def test_string_parser_with_uuid_format(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
@@ -139,11 +152,15 @@ class TestStringTypeParser(TestCase):
|
|||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": "uuid",
|
"format": "uuid",
|
||||||
|
"examples": ["ab71aaf4-ab6e-43cd-a369-cebdd9f7a4c6"],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, UUID)
|
self.assertEqual(type_parsing, UUID)
|
||||||
|
self.assertEqual(
|
||||||
|
type_validator["examples"], [UUID("ab71aaf4-ab6e-43cd-a369-cebdd9f7a4c6")]
|
||||||
|
)
|
||||||
|
|
||||||
def test_string_parser_with_time_format(self):
|
def test_string_parser_with_time_format(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
@@ -151,19 +168,33 @@ class TestStringTypeParser(TestCase):
|
|||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": "time",
|
"format": "time",
|
||||||
|
"examples": ["14:30:00", "09:15:30.500", "10:00:00+02:00"],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, time)
|
self.assertEqual(type_parsing, time)
|
||||||
|
self.assertEqual(
|
||||||
|
type_validator["examples"],
|
||||||
|
[
|
||||||
|
time(hour=14, minute=30, second=0),
|
||||||
|
time(hour=9, minute=15, second=30, microsecond=500_000),
|
||||||
|
time(hour=10, minute=0, second=0, tzinfo=timezone(timedelta(hours=2))),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
def test_string_parser_with_pattern_based_formats(self):
|
def test_string_parser_with_pattern_based_formats(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
|
|
||||||
for format_type in ["hostname"]:
|
format_types = {
|
||||||
|
"hostname": "example.com",
|
||||||
|
}
|
||||||
|
|
||||||
|
for format_type, example_type in format_types.items():
|
||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": format_type,
|
"format": format_type,
|
||||||
|
"examples": [example_type],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties(
|
type_parsing, type_validator = parser.from_properties(
|
||||||
@@ -175,6 +206,7 @@ class TestStringTypeParser(TestCase):
|
|||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
type_validator["pattern"], parser.format_pattern_mapping[format_type]
|
type_validator["pattern"], parser.format_pattern_mapping[format_type]
|
||||||
)
|
)
|
||||||
|
self.assertEqual(type_validator["examples"], [example_type])
|
||||||
|
|
||||||
def test_string_parser_with_unsupported_format(self):
|
def test_string_parser_with_unsupported_format(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
@@ -198,11 +230,20 @@ class TestStringTypeParser(TestCase):
|
|||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": "date",
|
"format": "date",
|
||||||
|
"examples": ["2025-11-17", "1999-12-31", "2000-01-01"],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, date)
|
self.assertEqual(type_parsing, date)
|
||||||
|
self.assertEqual(
|
||||||
|
type_validator["examples"],
|
||||||
|
[
|
||||||
|
date(year=2025, month=11, day=17),
|
||||||
|
date(year=1999, month=12, day=31),
|
||||||
|
date(year=2000, month=1, day=1),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
def test_string_parser_with_datetime_format(self):
|
def test_string_parser_with_datetime_format(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
@@ -210,11 +251,52 @@ class TestStringTypeParser(TestCase):
|
|||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": "date-time",
|
"format": "date-time",
|
||||||
|
"examples": [
|
||||||
|
"2025-11-17T11:15:00",
|
||||||
|
"2025-11-17T11:15:00+01:00",
|
||||||
|
"2025-11-17T11:15:00.123456-05:00",
|
||||||
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, datetime)
|
self.assertEqual(type_parsing, datetime)
|
||||||
|
self.assertEqual(
|
||||||
|
type_validator["examples"],
|
||||||
|
[
|
||||||
|
datetime(year=2025, month=11, day=17, hour=11, minute=15, second=0),
|
||||||
|
datetime(
|
||||||
|
year=2025,
|
||||||
|
month=11,
|
||||||
|
day=17,
|
||||||
|
hour=11,
|
||||||
|
minute=15,
|
||||||
|
second=0,
|
||||||
|
tzinfo=timezone(timedelta(hours=1)),
|
||||||
|
),
|
||||||
|
datetime(
|
||||||
|
year=2025,
|
||||||
|
month=11,
|
||||||
|
day=17,
|
||||||
|
hour=11,
|
||||||
|
minute=15,
|
||||||
|
second=0,
|
||||||
|
microsecond=123456,
|
||||||
|
tzinfo=timezone(timedelta(hours=-5)),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_string_parser_with_invalid_example_value(self):
|
||||||
|
with self.assertRaises(InvalidSchemaException):
|
||||||
|
StringTypeParser().from_properties(
|
||||||
|
"placeholder",
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"format": "email",
|
||||||
|
"examples": ["invalid-email"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
def test_string_parser_with_timedelta_format(self):
|
def test_string_parser_with_timedelta_format(self):
|
||||||
parser = StringTypeParser()
|
parser = StringTypeParser()
|
||||||
@@ -222,8 +304,18 @@ class TestStringTypeParser(TestCase):
|
|||||||
properties = {
|
properties = {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"format": "duration",
|
"format": "duration",
|
||||||
|
"examples": ["P1Y2M3DT4H5M6S", "PT30M", "P7D", "PT0.5S"],
|
||||||
}
|
}
|
||||||
|
|
||||||
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
type_parsing, type_validator = parser.from_properties("placeholder", properties)
|
||||||
|
|
||||||
self.assertEqual(type_parsing, timedelta)
|
self.assertEqual(type_parsing, timedelta)
|
||||||
|
self.assertEqual(
|
||||||
|
type_validator["examples"],
|
||||||
|
[
|
||||||
|
timedelta(days=428, hours=4, minutes=5, seconds=6),
|
||||||
|
timedelta(minutes=30),
|
||||||
|
timedelta(days=7),
|
||||||
|
timedelta(seconds=0.5),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|||||||
@@ -20,3 +20,14 @@ class TestGenericTypeParser(TestCase):
|
|||||||
def test_get_impl_invalid_type(self):
|
def test_get_impl_invalid_type(self):
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
GenericTypeParser._get_impl({"type": "invalid_type"})
|
GenericTypeParser._get_impl({"type": "invalid_type"})
|
||||||
|
|
||||||
|
def test_invalid_examples_not_list(self):
|
||||||
|
parser = StringTypeParser()
|
||||||
|
|
||||||
|
properties = {
|
||||||
|
"type": "integer",
|
||||||
|
"examples": "this should be a list",
|
||||||
|
}
|
||||||
|
|
||||||
|
with self.assertRaises(InvalidSchemaException):
|
||||||
|
parser.from_properties("placeholder", properties)
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
from jambo import SchemaConverter
|
from jambo import SchemaConverter
|
||||||
from jambo.exceptions import InvalidSchemaException, UnsupportedSchemaException
|
from jambo.exceptions import InvalidSchemaException, UnsupportedSchemaException
|
||||||
|
from jambo.types import JSONSchema
|
||||||
|
|
||||||
from pydantic import AnyUrl, BaseModel, ValidationError
|
from pydantic import AnyUrl, BaseModel, ValidationError
|
||||||
|
from typing_extensions import get_args
|
||||||
|
|
||||||
from ipaddress import IPv4Address, IPv6Address
|
from ipaddress import IPv4Address, IPv6Address
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
@@ -13,6 +15,12 @@ def is_pydantic_model(cls):
|
|||||||
|
|
||||||
|
|
||||||
class TestSchemaConverter(TestCase):
|
class TestSchemaConverter(TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.converter = SchemaConverter()
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
self.converter.clear_ref_cache(namespace=None)
|
||||||
|
|
||||||
def test_invalid_schema(self):
|
def test_invalid_schema(self):
|
||||||
schema = {
|
schema = {
|
||||||
"title": 1,
|
"title": 1,
|
||||||
@@ -25,7 +33,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
SchemaConverter.build(schema)
|
self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
def test_invalid_schema_type(self):
|
def test_invalid_schema_type(self):
|
||||||
schema = {
|
schema = {
|
||||||
@@ -39,7 +47,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
SchemaConverter.build(schema)
|
self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
def test_build_expects_title(self):
|
def test_build_expects_title(self):
|
||||||
schema = {
|
schema = {
|
||||||
@@ -52,7 +60,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
SchemaConverter.build(schema)
|
self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
def test_build_expects_object(self):
|
def test_build_expects_object(self):
|
||||||
schema = {
|
schema = {
|
||||||
@@ -62,7 +70,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(UnsupportedSchemaException):
|
with self.assertRaises(UnsupportedSchemaException):
|
||||||
SchemaConverter.build(schema)
|
self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
def test_is_invalid_field(self):
|
def test_is_invalid_field(self):
|
||||||
schema = {
|
schema = {
|
||||||
@@ -78,7 +86,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException) as context:
|
with self.assertRaises(InvalidSchemaException) as context:
|
||||||
SchemaConverter.build(schema)
|
self.converter.build_with_cache(schema)
|
||||||
self.assertTrue("Unknown type" in str(context.exception))
|
self.assertTrue("Unknown type" in str(context.exception))
|
||||||
|
|
||||||
def test_jsonschema_to_pydantic(self):
|
def test_jsonschema_to_pydantic(self):
|
||||||
@@ -93,7 +101,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["name"],
|
"required": ["name"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
self.assertTrue(is_pydantic_model(model))
|
self.assertTrue(is_pydantic_model(model))
|
||||||
|
|
||||||
@@ -114,7 +122,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["name"],
|
"required": ["name"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
self.assertEqual(model(name="John", age=30).name, "John")
|
self.assertEqual(model(name="John", age=30).name, "John")
|
||||||
|
|
||||||
@@ -145,7 +153,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["age"],
|
"required": ["age"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
self.assertEqual(model(age=30).age, 30)
|
self.assertEqual(model(age=30).age, 30)
|
||||||
|
|
||||||
@@ -170,7 +178,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["age"],
|
"required": ["age"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
self.assertEqual(model(age=30).age, 30.0)
|
self.assertEqual(model(age=30).age, 30.0)
|
||||||
|
|
||||||
@@ -191,7 +199,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["is_active"],
|
"required": ["is_active"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
self.assertEqual(model(is_active=True).is_active, True)
|
self.assertEqual(model(is_active=True).is_active, True)
|
||||||
|
|
||||||
@@ -214,7 +222,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["friends"],
|
"required": ["friends"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
model(friends=["John", "Jane", "John"]).friends, {"John", "Jane"}
|
model(friends=["John", "Jane", "John"]).friends, {"John", "Jane"}
|
||||||
@@ -227,26 +235,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
model(friends=["John", "Jane", "Invalid"])
|
model(friends=["John", "Jane", "Invalid"])
|
||||||
|
|
||||||
def test_validation_list_with_missing_items(self):
|
def test_validation_list_with_missing_items(self):
|
||||||
model = SchemaConverter.build(
|
model = self.converter.build_with_cache(
|
||||||
{
|
|
||||||
"title": "Person",
|
|
||||||
"description": "A person",
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"friends": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {"type": "string"},
|
|
||||||
"minItems": 1,
|
|
||||||
"maxItems": 2,
|
|
||||||
"default": ["John", "Jane"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
self.assertEqual(model().friends, ["John", "Jane"])
|
|
||||||
|
|
||||||
model = SchemaConverter.build(
|
|
||||||
{
|
{
|
||||||
"title": "Person",
|
"title": "Person",
|
||||||
"description": "A person",
|
"description": "A person",
|
||||||
@@ -284,7 +273,8 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["address"],
|
"required": ["address"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
self.assertEqual(model.__doc__, "A person")
|
||||||
|
|
||||||
obj = model(address={"street": "123 Main St", "city": "Springfield"})
|
obj = model(address={"street": "123 Main St", "city": "Springfield"})
|
||||||
|
|
||||||
@@ -308,7 +298,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["name"],
|
"required": ["name"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = model(name="John")
|
obj = model(name="John")
|
||||||
|
|
||||||
@@ -331,7 +321,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
SchemaConverter.build(schema_max_length)
|
self.converter.build_with_cache(schema_max_length)
|
||||||
|
|
||||||
def test_default_for_list(self):
|
def test_default_for_list(self):
|
||||||
schema_list = {
|
schema_list = {
|
||||||
@@ -348,10 +338,11 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["friends"],
|
"required": ["friends"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model_list = SchemaConverter.build(schema_list)
|
model_list = self.converter.build_with_cache(schema_list)
|
||||||
|
|
||||||
self.assertEqual(model_list().friends, ["John", "Jane"])
|
self.assertEqual(model_list().friends, ["John", "Jane"])
|
||||||
|
|
||||||
|
def test_default_for_list_with_unique_items(self):
|
||||||
# Test for default with uniqueItems
|
# Test for default with uniqueItems
|
||||||
schema_set = {
|
schema_set = {
|
||||||
"title": "Person",
|
"title": "Person",
|
||||||
@@ -368,7 +359,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["friends"],
|
"required": ["friends"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model_set = SchemaConverter.build(schema_set)
|
model_set = self.converter.build_with_cache(schema_set)
|
||||||
|
|
||||||
self.assertEqual(model_set().friends, {"John", "Jane"})
|
self.assertEqual(model_set().friends, {"John", "Jane"})
|
||||||
|
|
||||||
@@ -390,7 +381,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["address"],
|
"required": ["address"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = model(address={"street": "123 Main St", "city": "Springfield"})
|
obj = model(address={"street": "123 Main St", "city": "Springfield"})
|
||||||
|
|
||||||
@@ -414,7 +405,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
Model = SchemaConverter.build(schema)
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = Model(
|
obj = Model(
|
||||||
name="J",
|
name="J",
|
||||||
@@ -443,7 +434,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
Model = SchemaConverter.build(schema)
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = Model(id=1)
|
obj = Model(id=1)
|
||||||
self.assertEqual(obj.id, 1)
|
self.assertEqual(obj.id, 1)
|
||||||
@@ -467,7 +458,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"properties": {"email": {"type": "string", "format": "email"}},
|
"properties": {"email": {"type": "string", "format": "email"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
self.assertEqual(model(email="test@example.com").email, "test@example.com")
|
self.assertEqual(model(email="test@example.com").email, "test@example.com")
|
||||||
|
|
||||||
with self.assertRaises(ValidationError):
|
with self.assertRaises(ValidationError):
|
||||||
@@ -480,7 +471,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"properties": {"website": {"type": "string", "format": "uri"}},
|
"properties": {"website": {"type": "string", "format": "uri"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
model(website="https://example.com").website, AnyUrl("https://example.com")
|
model(website="https://example.com").website, AnyUrl("https://example.com")
|
||||||
)
|
)
|
||||||
@@ -495,7 +486,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"properties": {"ip": {"type": "string", "format": "ipv4"}},
|
"properties": {"ip": {"type": "string", "format": "ipv4"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
self.assertEqual(model(ip="192.168.1.1").ip, IPv4Address("192.168.1.1"))
|
self.assertEqual(model(ip="192.168.1.1").ip, IPv4Address("192.168.1.1"))
|
||||||
|
|
||||||
with self.assertRaises(ValidationError):
|
with self.assertRaises(ValidationError):
|
||||||
@@ -508,7 +499,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"properties": {"ip": {"type": "string", "format": "ipv6"}},
|
"properties": {"ip": {"type": "string", "format": "ipv6"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
model(ip="2001:0db8:85a3:0000:0000:8a2e:0370:7334").ip,
|
model(ip="2001:0db8:85a3:0000:0000:8a2e:0370:7334").ip,
|
||||||
IPv6Address("2001:0db8:85a3:0000:0000:8a2e:0370:7334"),
|
IPv6Address("2001:0db8:85a3:0000:0000:8a2e:0370:7334"),
|
||||||
@@ -524,7 +515,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"properties": {"id": {"type": "string", "format": "uuid"}},
|
"properties": {"id": {"type": "string", "format": "uuid"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
model(id="123e4567-e89b-12d3-a456-426614174000").id,
|
model(id="123e4567-e89b-12d3-a456-426614174000").id,
|
||||||
@@ -541,7 +532,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"properties": {"hostname": {"type": "string", "format": "hostname"}},
|
"properties": {"hostname": {"type": "string", "format": "hostname"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
self.assertEqual(model(hostname="example.com").hostname, "example.com")
|
self.assertEqual(model(hostname="example.com").hostname, "example.com")
|
||||||
|
|
||||||
with self.assertRaises(ValidationError):
|
with self.assertRaises(ValidationError):
|
||||||
@@ -554,7 +545,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"properties": {"timestamp": {"type": "string", "format": "date-time"}},
|
"properties": {"timestamp": {"type": "string", "format": "date-time"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
model(timestamp="2024-01-01T12:00:00Z").timestamp.isoformat(),
|
model(timestamp="2024-01-01T12:00:00Z").timestamp.isoformat(),
|
||||||
"2024-01-01T12:00:00+00:00",
|
"2024-01-01T12:00:00+00:00",
|
||||||
@@ -570,7 +561,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"properties": {"time": {"type": "string", "format": "time"}},
|
"properties": {"time": {"type": "string", "format": "time"}},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
model(time="20:20:39+00:00").time.isoformat(), "20:20:39+00:00"
|
model(time="20:20:39+00:00").time.isoformat(), "20:20:39+00:00"
|
||||||
)
|
)
|
||||||
@@ -586,7 +577,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
with self.assertRaises(InvalidSchemaException):
|
with self.assertRaises(InvalidSchemaException):
|
||||||
SchemaConverter.build(schema)
|
self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
def test_ref_with_root_ref(self):
|
def test_ref_with_root_ref(self):
|
||||||
schema = {
|
schema = {
|
||||||
@@ -602,7 +593,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["name", "age"],
|
"required": ["name", "age"],
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = model(
|
obj = model(
|
||||||
name="John",
|
name="John",
|
||||||
@@ -637,7 +628,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
model = SchemaConverter.build(schema)
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = model(
|
obj = model(
|
||||||
name="John",
|
name="John",
|
||||||
@@ -676,7 +667,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
Model = SchemaConverter.build(schema)
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = Model(
|
obj = Model(
|
||||||
name="John",
|
name="John",
|
||||||
@@ -702,7 +693,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["status"],
|
"required": ["status"],
|
||||||
}
|
}
|
||||||
|
|
||||||
Model = SchemaConverter.build(schema)
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = Model(status="active")
|
obj = Model(status="active")
|
||||||
self.assertEqual(obj.status.value, "active")
|
self.assertEqual(obj.status.value, "active")
|
||||||
@@ -721,7 +712,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["status"],
|
"required": ["status"],
|
||||||
}
|
}
|
||||||
|
|
||||||
Model = SchemaConverter.build(schema)
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = Model()
|
obj = Model()
|
||||||
self.assertEqual(obj.status.value, "active")
|
self.assertEqual(obj.status.value, "active")
|
||||||
@@ -738,7 +729,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["name"],
|
"required": ["name"],
|
||||||
}
|
}
|
||||||
|
|
||||||
Model = SchemaConverter.build(schema)
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = Model()
|
obj = Model()
|
||||||
self.assertEqual(obj.name, "United States of America")
|
self.assertEqual(obj.name, "United States of America")
|
||||||
@@ -761,7 +752,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
"required": ["name"],
|
"required": ["name"],
|
||||||
}
|
}
|
||||||
|
|
||||||
Model = SchemaConverter.build(schema)
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = Model()
|
obj = Model()
|
||||||
self.assertEqual(obj.name, ["Brazil"])
|
self.assertEqual(obj.name, ["Brazil"])
|
||||||
@@ -781,7 +772,7 @@ class TestSchemaConverter(TestCase):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
Model = SchemaConverter.build(schema)
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
obj = Model()
|
obj = Model()
|
||||||
self.assertIsNone(obj.a_thing)
|
self.assertIsNone(obj.a_thing)
|
||||||
@@ -791,3 +782,392 @@ class TestSchemaConverter(TestCase):
|
|||||||
|
|
||||||
with self.assertRaises(ValidationError):
|
with self.assertRaises(ValidationError):
|
||||||
Model(a_thing="not none")
|
Model(a_thing="not none")
|
||||||
|
|
||||||
|
def test_scoped_ref_schema(self):
|
||||||
|
schema: JSONSchema = {
|
||||||
|
"title": "Example Schema",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"operating_system": {
|
||||||
|
"oneOf": [
|
||||||
|
{"$ref": "#/$defs/operating_system"},
|
||||||
|
{
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"creation": {"$ref": "#/$defs/operating_system"},
|
||||||
|
"reinstallation": {"$ref": "#/$defs/operating_system"},
|
||||||
|
},
|
||||||
|
"required": ["creation", "reinstallation"],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"$defs": {
|
||||||
|
"operating_system": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"version": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["name", "version"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
schema_type = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
# check for me that the types generated by the oneOf in the typing.Annotated have different names
|
||||||
|
operating_system_field = schema_type.model_fields["operating_system"]
|
||||||
|
|
||||||
|
arg1, arg2 = get_args(operating_system_field.annotation)
|
||||||
|
|
||||||
|
first_type = get_args(arg1)[0]
|
||||||
|
second_type = get_args(arg2)[0]
|
||||||
|
|
||||||
|
self.assertNotEqual(first_type.__name__, second_type.__name__)
|
||||||
|
|
||||||
|
def test_object_invalid_require(self):
|
||||||
|
# https://github.com/HideyoshiNakazone/jambo/issues/60
|
||||||
|
object_ = self.converter.build_with_cache(
|
||||||
|
{
|
||||||
|
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||||
|
"title": "TEST",
|
||||||
|
"type": "object",
|
||||||
|
"required": ["title"],
|
||||||
|
"properties": {
|
||||||
|
"title": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "The title of the object",
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"summary": {
|
||||||
|
"type": "string",
|
||||||
|
},
|
||||||
|
"details": {
|
||||||
|
"type": "string",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertFalse(object_.model_fields["description"].is_required()) # FAIL
|
||||||
|
|
||||||
|
def test_instance_level_ref_cache(self):
|
||||||
|
ref_cache = {}
|
||||||
|
|
||||||
|
schema = {
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"emergency_contact": {
|
||||||
|
"$ref": "#",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "age"],
|
||||||
|
}
|
||||||
|
|
||||||
|
converter1 = SchemaConverter(ref_cache)
|
||||||
|
model1 = converter1.build_with_cache(schema)
|
||||||
|
|
||||||
|
converter2 = SchemaConverter(ref_cache)
|
||||||
|
model2 = converter2.build_with_cache(schema)
|
||||||
|
|
||||||
|
self.assertIs(model1, model2)
|
||||||
|
|
||||||
|
def test_instance_level_ref_cache_isolation_via_without_cache_param(self):
|
||||||
|
schema = {
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"emergency_contact": {
|
||||||
|
"$ref": "#",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "age"],
|
||||||
|
}
|
||||||
|
|
||||||
|
model1 = self.converter.build_with_cache(schema, without_cache=True)
|
||||||
|
model2 = self.converter.build_with_cache(schema, without_cache=True)
|
||||||
|
|
||||||
|
self.assertIsNot(model1, model2)
|
||||||
|
|
||||||
|
def test_instance_level_ref_cache_isolation_via_provided_cache(self):
|
||||||
|
schema = {
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"emergency_contact": {
|
||||||
|
"$ref": "#",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "age"],
|
||||||
|
}
|
||||||
|
|
||||||
|
model1 = self.converter.build_with_cache(schema, ref_cache={})
|
||||||
|
model2 = self.converter.build_with_cache(schema, ref_cache={})
|
||||||
|
|
||||||
|
self.assertIsNot(model1, model2)
|
||||||
|
|
||||||
|
def test_get_type_from_cache(self):
|
||||||
|
schema = {
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"emergency_contact": {
|
||||||
|
"$ref": "#",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "age"],
|
||||||
|
}
|
||||||
|
|
||||||
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
cached_model = self.converter.get_cached_ref("Person")
|
||||||
|
|
||||||
|
self.assertIs(model, cached_model)
|
||||||
|
|
||||||
|
def test_get_type_from_cache_not_found(self):
|
||||||
|
cached_model = self.converter.get_cached_ref("NonExistentModel")
|
||||||
|
|
||||||
|
self.assertIsNone(cached_model)
|
||||||
|
|
||||||
|
def test_get_type_from_cache_nested_type(self):
|
||||||
|
schema = {
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"street": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["street", "city"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "age", "address"],
|
||||||
|
}
|
||||||
|
|
||||||
|
model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
cached_model = self.converter.get_cached_ref("Person.address")
|
||||||
|
|
||||||
|
self.assertIsNotNone(cached_model)
|
||||||
|
self.assertIs(model.model_fields["address"].annotation, cached_model)
|
||||||
|
|
||||||
|
def test_get_type_from_cache_with_def(self):
|
||||||
|
schema = {
|
||||||
|
"title": "person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {"$ref": "#/$defs/address"},
|
||||||
|
},
|
||||||
|
"$defs": {
|
||||||
|
"address": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"street": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["street", "city"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
person_model = self.converter.build_with_cache(schema)
|
||||||
|
cached_person_model = self.converter.get_cached_ref("person")
|
||||||
|
|
||||||
|
self.assertIs(person_model, cached_person_model)
|
||||||
|
|
||||||
|
cached_address_model = self.converter.get_cached_ref("address")
|
||||||
|
|
||||||
|
self.assertIsNotNone(cached_address_model)
|
||||||
|
|
||||||
|
def test_parse_list_type_multiple_values(self):
|
||||||
|
schema = {
|
||||||
|
"title": "TestListType",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {"values": {"type": ["string", "number"]}},
|
||||||
|
}
|
||||||
|
|
||||||
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
obj1 = Model(values="a string")
|
||||||
|
self.assertEqual(obj1.values, "a string")
|
||||||
|
|
||||||
|
obj2 = Model(values=42)
|
||||||
|
self.assertEqual(obj2.values, 42)
|
||||||
|
|
||||||
|
def test_parse_list_type_one_value(self):
|
||||||
|
schema = {
|
||||||
|
"title": "TestListType",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {"values": {"type": ["string"]}},
|
||||||
|
}
|
||||||
|
|
||||||
|
Model = self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
obj1 = Model(values="a string")
|
||||||
|
self.assertEqual(obj1.values, "a string")
|
||||||
|
|
||||||
|
def test_parse_list_type_empty(self):
|
||||||
|
schema = {
|
||||||
|
"title": "TestListType",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {"values": {"type": []}},
|
||||||
|
}
|
||||||
|
|
||||||
|
with self.assertRaises(InvalidSchemaException):
|
||||||
|
self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
def test_parse_list_type_root_level_throws(self):
|
||||||
|
schema = {"title": "TestListType", "type": ["string", "number"]}
|
||||||
|
|
||||||
|
with self.assertRaises(InvalidSchemaException):
|
||||||
|
self.converter.build_with_cache(schema)
|
||||||
|
|
||||||
|
def tests_instance_level_ref_cache_isolation_via_property_id(self):
|
||||||
|
schema1: JSONSchema = {
|
||||||
|
"$id": "http://example.com/schemas/person1.json",
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"emergency_contact": {
|
||||||
|
"$ref": "#",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "age"],
|
||||||
|
}
|
||||||
|
|
||||||
|
model1 = self.converter.build_with_cache(schema1)
|
||||||
|
|
||||||
|
schema2: JSONSchema = {
|
||||||
|
"$id": "http://example.com/schemas/person2.json",
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["name", "age", "address"],
|
||||||
|
}
|
||||||
|
|
||||||
|
model2 = self.converter.build_with_cache(schema2)
|
||||||
|
|
||||||
|
self.assertIsNot(model1, model2)
|
||||||
|
|
||||||
|
def tests_instance_level_ref_cache_colision_when_same_property_id(self):
|
||||||
|
schema1: JSONSchema = {
|
||||||
|
"$id": "http://example.com/schemas/person.json",
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"emergency_contact": {
|
||||||
|
"$ref": "#",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["name", "age"],
|
||||||
|
}
|
||||||
|
|
||||||
|
model1 = self.converter.build_with_cache(schema1)
|
||||||
|
|
||||||
|
schema2: JSONSchema = {
|
||||||
|
"$id": "http://example.com/schemas/person.json",
|
||||||
|
"title": "Person",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string"},
|
||||||
|
"age": {"type": "integer"},
|
||||||
|
"address": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["name", "age", "address"],
|
||||||
|
}
|
||||||
|
|
||||||
|
model2 = self.converter.build_with_cache(schema2)
|
||||||
|
|
||||||
|
self.assertIs(model1, model2)
|
||||||
|
|
||||||
|
def test_namespace_isolation_via_on_call_config(self):
    """A model cached under a namespace is invisible outside that namespace.

    Builds a schema whose "$id" doubles as the namespace, then verifies
    that ``get_cached_ref`` misses without the namespace argument and
    hits (returning the identical object) with it.
    """
    namespace = "namespace1"

    schema: JSONSchema = {
        "$id": namespace,
        "title": "Person",
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "age": {"type": "integer"},
            "address": {
                "type": "object",
                "properties": {
                    "street": {"type": "string"},
                    "city": {"type": "string"},
                },
                "required": ["street", "city"],
            },
        },
        "required": ["name", "age", "address"],
    }
    built_model = self.converter.build_with_cache(schema)

    # Lookup without a namespace must not see the namespaced entry.
    self.assertIsNone(self.converter.get_cached_ref("Person"))

    # Namespaced lookup returns the very object that was built.
    self.assertIs(
        built_model, self.converter.get_cached_ref("Person", namespace=namespace)
    )
|
||||||
|
|
||||||
|
def test_clear_namespace_registry(self):
    """``clear_ref_cache(namespace=...)`` evicts that namespace's entries.

    After building and confirming the cached model is retrievable under
    its namespace, clearing the namespace makes the same lookup miss.
    """
    namespace = "namespace_to_clear"

    schema: JSONSchema = {
        "$id": namespace,
        "title": "Person",
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "age": {"type": "integer"},
            "address": {
                "type": "object",
                "properties": {
                    "street": {"type": "string"},
                    "city": {"type": "string"},
                },
                "required": ["street", "city"],
            },
        },
        "required": ["name", "age", "address"],
    }
    built_model = self.converter.build_with_cache(schema)

    # Sanity: the model is retrievable under its namespace before clearing.
    self.assertIs(
        built_model, self.converter.get_cached_ref("Person", namespace=namespace)
    )

    self.converter.clear_ref_cache(namespace=namespace)

    # After clearing, the namespaced lookup must miss.
    self.assertIsNone(self.converter.get_cached_ref("Person", namespace=namespace))
|
||||||
|
|||||||
Reference in New Issue
Block a user