193 Commits

Author SHA1 Message Date
e8bda6bc07 Merge pull request #71 from HideyoshiNakazone/fix/fixes-annotation-definition-anyof
fix: fixes annotation definition in anyof parser
2025-11-28 18:30:38 -03:00
d8fe98639a fix: fixes annotation definition in anyof parser 2025-11-28 18:28:49 -03:00
666e12262f Merge pull request #68 from HideyoshiNakazone/feature/cache-per-namespace
chore: minor adjustment of docs
2025-11-26 15:31:10 -03:00
ab9646238e chore: minor adjustment of docs 2025-11-26 15:30:38 -03:00
dba492a6dc Merge pull request #67 from HideyoshiNakazone/feature/cache-per-namespace
feat: adds caching per namespace
2025-11-26 15:28:17 -03:00
628abe161d feat: adds newly added feature to the docs 2025-11-26 15:23:29 -03:00
136d68d273 feat: alter tests to clear all namespaces on tearDown 2025-11-26 15:07:22 -03:00
fcea994dd6 feat: adds caching per namespace 2025-11-26 15:05:10 -03:00
39a9612106 Merge pull request #66 from HideyoshiNakazone/feature/support-type-list
feat: adds support for list of types
2025-11-26 10:55:32 -03:00
27e756dadf feat: format and linting pre-merge 2025-11-26 10:54:42 -03:00
40106e4765 feat: validates that top level type cannot be list 2025-11-26 10:52:50 -03:00
d418ad96ad feat: adds support for list of types 2025-11-26 10:48:31 -03:00
79e65b994e Merge pull request #65 from HideyoshiNakazone/chore/improves-readme
chore: improves documentation and readme
2025-11-26 09:50:38 -03:00
beed4e5e97 chore: improves documentation and readme 2025-11-26 09:49:38 -03:00
b705a3a70b Merge pull request #64 from HideyoshiNakazone/feature/alters-library-api
feature: stabilized the new instance method and adds docs
2025-11-26 00:05:15 -03:00
268ac85667 chore: adds documentation for the new ref_cache implementation 2025-11-26 00:02:57 -03:00
20872d4a91 feat: stabilizes the api for cached build using instance method 2025-11-25 22:26:28 -03:00
34910b55d7 Merge pull request #63 from HideyoshiNakazone/feature/add-instance-level-ref-cache
Feature/add instance level ref cache
2025-11-24 21:07:55 -03:00
a3cbd5bc3d feat: better warning for cache collision 2025-11-24 21:06:15 -03:00
682f19654d feat: better methodology for accessing cached references of: objects, subobjects and defs 2025-11-24 20:52:02 -03:00
4baaeed349 feat: adds test for ObjectTypeParser asserting for the presence of a ref_cache 2025-11-24 20:00:42 -03:00
9837a99ec9 feat: adds tests for type not found in ref_cache 2025-11-24 19:53:29 -03:00
3a8ca951db feat: adds tests for isolation method in ref_cache 2025-11-24 19:52:54 -03:00
57f8b571de feat: adds tests for SchemaConverter.get_cached_ref 2025-11-24 19:38:53 -03:00
5ec30cd565 feat: changes tests to use instance level build 2025-11-24 19:32:42 -03:00
c2b9e8daf8 fix: fixes implementation of save object to cache and adds tests 2025-11-24 18:21:27 -03:00
328eb66034 fix: fixes save object after parsing 2025-11-24 18:20:51 -03:00
4de711075e feat: removes unnecessary api keyword 2025-11-24 18:20:51 -03:00
abc8bc2e40 feat: saves object after parsing 2025-11-24 18:20:51 -03:00
10bad254d7 feat: initial implementation of instance level ref cache 2025-11-24 18:20:50 -03:00
b5e2d703cb Merge pull request #62 from JCHacking/template-bug
ci(github-actions): correct label of bug
2025-11-24 18:04:06 -03:00
JCHacking
44fa0cf16a ci(github-actions): correct label of bug 2025-11-24 19:31:16 +01:00
d11e3191c3 Merge pull request #61 from HideyoshiNakazone/fix/object-invalid-required
fix: fixes invalid subobject required
2025-11-24 14:38:48 -03:00
2da409e6df fix: fixes invalid subobject required 2025-11-24 17:34:29 +00:00
e775b53f7d Merge pull request #58 from HideyoshiNakazone/feature/oneof-unique-subtypes-naming
Feature/oneof unique subtypes naming
2025-11-23 22:11:00 -03:00
f15913c58e feat: tests that the generated fields in the oneOf parser have unique names and applies the same logic to the anyOf parser 2025-11-23 22:09:55 -03:00
f80a1bbda3 feat: fixes error of multiple forwardref with same name 2025-11-23 21:52:09 -03:00
b31c990b54 Merge pull request #57 from HideyoshiNakazone/feature/adds-title-keyword
feat: adds title and deprecated to the list of default mappings in th…
2025-11-23 20:19:19 -03:00
a0d15726d4 feat: adds title and deprecated to the list of default mappings in the GenericTypeParser 2025-11-23 20:17:16 -03:00
59f062ec37 Merge pull request #54 from JCHacking/examples
feat: Add examples
2025-11-23 20:10:27 -03:00
5036059272 feat: adds tests for examples in ref 2025-11-23 20:09:16 -03:00
90639b6426 chore: subs typing import to typing_extensions 2025-11-23 20:05:28 -03:00
e43e92cb9e feat: minor adjustments to oneOf and adds tests for examples in allOf, oneOf, anyOf 2025-11-23 20:03:19 -03:00
ffbd124cf9 feat: adds example to allOf 2025-11-23 18:59:47 -03:00
cfbe1f38c8 feat: fixes broken example property extraction in array type parser 2025-11-23 18:09:35 -03:00
9823e69329 feat: fixes test for object example 2025-11-23 15:02:50 -03:00
84292cf3c0 feat: fixes and validates so that arrays have parsed examples 2025-11-23 14:59:16 -03:00
8b1520741b feat: fixes and validates that objects have parsed examples 2025-11-23 14:40:14 -03:00
c7e366cf08 feat: improves test coverage 2025-11-23 02:42:54 -03:00
ebcc8a295e feat: remove unnecessary dependency group 2025-11-23 02:37:14 -03:00
07f301db1c feat: removes python3.10 specific broken test 2025-11-23 02:25:41 -03:00
c9330dfd6d feat: fixes error on validation of IPAddresses by Upgrading Pydantic min version to v2.12.4, fixes internal tests implementation and fixes minor logic errors 2025-11-23 02:15:41 -03:00
JCHacking
9bc16ff1aa remove print 2025-11-17 23:45:08 +01:00
JCHacking
43ce95cc9a feat(examples): Add examples for primitive types
Refs: #52
2025-11-17 23:42:59 +01:00
81c149120e Merge pull request #50 from fredsonnenwald/string_format
Fix Field deprecation warning resulting from building models with formatted strings
2025-09-15 19:18:20 -03:00
171dddabab Merge pull request #51 from HideyoshiNakazone/chore/adds-pyright-config
chore: adds pyright config to project
2025-09-15 13:55:26 -03:00
f0192ee6d3 chore: adds pyright config to project 2025-09-15 13:54:32 -03:00
Fred Sonnenwald
82feea0ab1 Fix string Field deprecation warning
(partial revert of fbbff0b)
2025-09-15 16:50:49 +01:00
4d5ac1c885 Merge pull request #49 from HideyoshiNakazone/fix/fixes-docs
fix: fixes docs build
2025-09-14 10:51:26 -03:00
92c174c189 fix: fixes docs build 2025-09-14 10:49:53 -03:00
b1b5e71a81 Merge pull request #48 from HideyoshiNakazone/feature/explicit-exception-type
feat: more pythonic error parent class
2025-09-14 01:42:11 -03:00
156c825a67 feat: more pythonic error parent class 2025-09-14 01:40:59 -03:00
b4954c3b2e Merge pull request #47 from HideyoshiNakazone/feature/explicit-exception-type
Feature/explicit exception type
2025-09-14 01:13:27 -03:00
7f44e84bce feat: updates outdated docs for exceptions 2025-09-14 01:12:43 -03:00
8c6a04bbdf feat: adds simple tests for internal exceptions 2025-09-14 01:09:48 -03:00
e31002af32 feat: fixes tests to validate the type of exception thrown 2025-09-14 00:47:24 -03:00
30290771b1 feat: alters all standard errors and messages for more specific errors 2025-09-14 00:10:33 -03:00
f4d84d2749 feat: better exceptions for GenericTypeParser and AllOfTypeParser 2025-09-13 21:11:11 -03:00
e61d48881f feat: initial implementation of explicit exception types 2025-09-13 20:43:30 -03:00
f5ad857326 Merge pull request #46 from HideyoshiNakazone/feature/better-internal-typing
Better Internal Static Typing
2025-09-13 19:49:17 -03:00
e45086e29e feat: adds static type check to ci/cd 2025-09-13 19:48:17 -03:00
c1f04606ad fix: removes unnecessary check 2025-09-13 19:36:53 -03:00
5eb086bafd Better Internal Static Typing 2025-09-13 00:16:41 -03:00
5c30e752e3 Merge pull request #45 from HideyoshiNakazone/chore/fixes-license-pyproject
chore: fixes license in pyproject - no change was made
2025-09-12 11:36:55 -03:00
53418f2b2b chore: fixes license in pyproject - no change was made 2025-09-12 11:36:11 -03:00
002b75c53a Merge pull request #44 from h0rv/feature/add-py-typed-support
feat: Add py.typed marker file for proper typing support
2025-09-12 10:23:13 -03:00
Robby
1167b8a540 feat: Add py.typed marker file for proper typing support
- Add py.typed marker file to jambo package directory
- Enable static type checkers to recognize and use type annotations from the library

This allows IDEs and tools like mypy, pyright to properly type-check code using this library.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-10 13:59:39 -04:00
3992057c95 Merge pull request #43 from HideyoshiNakazone/maintenance/format-lint-code
(improvement): Formats and Lints Code - Minor Changes
2025-08-20 01:13:25 -03:00
71380073e4 (improvement): Formats and Lints Code - Minor Changes 2025-08-20 01:12:56 -03:00
4055efa5bf Merge pull request #42 from HideyoshiNakazone/improvement/better-string-validations
(improvement): Adds More Type Formats to String Parser
2025-08-20 00:31:46 -03:00
97aed6e9aa (improvement): Adds tests for UUID String Format 2025-08-20 00:30:54 -03:00
d3a2f1e76c (improvement): Adds More Type Formats to String Parser 2025-08-20 00:25:02 -03:00
0a3671974f Merge pull request #41 from HideyoshiNakazone/feature/fixes-docs
(fix): Fixes docs
2025-08-20 00:00:30 -03:00
8761ee5ef6 (fix): Fixes docs 2025-08-20 00:00:03 -03:00
85b5900392 Merge pull request #40 from HideyoshiNakazone/fix/adds-check-for-discriminator-type
(fix): Adds check for discriminator type
2025-08-19 22:31:02 -03:00
7e11c817a7 (fix): Adds check for discriminator type 2025-08-19 22:28:58 -03:00
dc5853c5b2 Merge pull request #39 from HideyoshiNakazone/feature/fixes-readme
(project): Fixes Readme
2025-08-19 20:48:27 -03:00
1e5b686c23 (project): Fixes Readme 2025-08-19 20:47:58 -03:00
bbe4c6979e Merge pull request #37 from HideyoshiNakazone/feature/implements-one-of
[FEATURE] Implements OneOf
2025-08-19 20:45:30 -03:00
c5e70402db (feat): Adds Warning to Docs About Discriminator Keyword 2025-08-19 20:44:16 -03:00
15944549a0 (feat): Adds Additional Tests 2025-08-19 20:40:49 -03:00
79932bb595 (feature): Removes _has_meaningful_constraints
Removes _has_meaningful_constraints since nowhere in the spec says that a subproperty should have a meaningful value other than its type
2025-08-19 20:29:25 -03:00
86894fa918 (feature): Fix OneOf behavior on invalid discriminator
According to the spec, propertyName is required when using a discriminator. If it is missing, the schema is invalid and should throw.
2025-08-19 20:20:20 -03:00
b386d4954e Merge remote-tracking branch 'origin/main' into feature/implements-one-of 2025-08-19 19:02:43 -03:00
1cab13a4a0 Merge pull request #38 from HideyoshiNakazone/feature/better-const-typing
[FEATURE] Adds Better Const Typing
2025-08-19 19:02:09 -03:00
6dad6e0c68 (feat): Adds Additional Test for Non-Hashable Const Values 2025-08-19 18:58:33 -03:00
fbbff0bd9e Removes Changes Not Feature Specific 2025-08-19 18:49:45 -03:00
Thomas
9aec7c3e3b feat(jambo): Add oneOf parser (#5)
* Add support for `oneOf` type parsing with validation and example cases

* Improve `oneOf` type parsing: refine validators, add discriminator support, and expand test coverage

* Add hashable and non-hashable value support to `ConstTypeParser` with expanded test cases

* Refine `field_props` check in `_type_parser` for cleaner default handling

* Update `StringTypeParser` to refine `format` handling and enrich `json_schema_extra`

* Remove outdated `oneOf` examples from docs, expand test cases and provide refined examples with discriminator support
2025-08-19 18:44:01 -03:00
cc6f2d42d5 Separates PR for Better Testing and Readability 2025-08-19 18:40:30 -03:00
Thomas
9797fb35d9 feat(jambo): Add oneOf parser (#5)
* Add support for `oneOf` type parsing with validation and example cases

* Improve `oneOf` type parsing: refine validators, add discriminator support, and expand test coverage

* Add hashable and non-hashable value support to `ConstTypeParser` with expanded test cases

* Refine `field_props` check in `_type_parser` for cleaner default handling

* Update `StringTypeParser` to refine `format` handling and enrich `json_schema_extra`

* Remove outdated `oneOf` examples from docs, expand test cases and provide refined examples with discriminator support
2025-08-19 18:31:51 -03:00
81a5fffef0 Merge pull request #32 from fredsonnenwald/add-null
Add null type parser
2025-08-18 23:39:11 -03:00
00d88388f8 Fixes Behavior of Pydantic None Type and Adds More Tests 2025-08-18 23:33:16 -03:00
609af7c32b Merge pull request #35 from fredsonnenwald/add-duration
add string duration -> timedelta
2025-08-18 23:05:08 -03:00
c59c1e8768 Merge pull request #36 from HideyoshiNakazone/fix/required-array-field-not-honored
Fix/required array field not honored
2025-08-18 23:00:59 -03:00
7b9464f458 Fixes Array So No DefaultFactory is Created When no Default is Set and Field is Required 2025-08-18 22:53:28 -03:00
617f1aab2b Adds Failing Test Case to Test 2025-08-18 22:27:49 -03:00
Fred Sonnenwald
976708934f add string duration -> timedelta 2025-08-08 12:38:33 +01:00
Fred Sonnenwald
e9d61a1268 Add null type parser 2025-06-30 12:23:47 +01:00
d431dca353 Merge pull request #31 from HideyoshiNakazone/project/revises-roadmap
Revises Roadmap
2025-06-23 16:28:40 -03:00
bf42ad638f Adds Docs to README 2025-06-23 16:27:54 -03:00
1bb0995d79 Revises Roadmap 2025-06-23 16:24:45 -03:00
b56598eb02 Merge pull request #30 from HideyoshiNakazone/feature/adds-const
Feature/adds const
2025-06-23 15:32:37 -03:00
60ac12fe39 Adds Docs to Const Type 2025-06-23 15:30:07 -03:00
198ebecef0 Adds Final Tests for Const Type 2025-06-23 15:18:34 -03:00
65a81a8da5 Initial Const Implementation 2025-06-22 22:13:53 -03:00
42a1ae24fe Merge pull request #29 from HideyoshiNakazone/feature/adds-enums
Adds Additional Validations in Enum
2025-06-22 17:39:54 -03:00
450d44c064 Adds Additional Validations in Enum 2025-06-22 17:38:58 -03:00
92172c8711 Merge pull request #28 from HideyoshiNakazone/feature/adds-enums
[Feature] Adds Enums
2025-06-22 17:25:27 -03:00
6c94047ec0 Adds Docs for Enum 2025-06-22 17:21:28 -03:00
ef66903948 Minor Fixes in EnumTypeParser and Adds Better UnitTests 2025-06-22 16:43:53 -03:00
7e591f0525 Initial Implementation of Enum 2025-06-22 11:18:42 -03:00
4ba5d83df6 Merge pull request #27 from HideyoshiNakazone/project/add-funding
Create FUNDING.yml
2025-06-22 10:16:58 -03:00
bdaa0cb5b1 Create FUNDING.yml 2025-06-22 10:16:42 -03:00
0ede98fcf0 Merge pull request #26 from HideyoshiNakazone/feature/add-doc
Fix ReadTheDocs Config File
2025-06-22 08:45:32 -03:00
ed2bb35d45 Fix ReadTheDocs Config File 2025-06-22 08:45:05 -03:00
798ea1d601 Merge pull request #21 from HideyoshiNakazone/feature/add-doc
Feature/add doc
2025-06-21 18:43:38 -03:00
02d11f57b2 Adds Config ReadTheDocs 2025-06-21 18:40:41 -03:00
bcbc83e502 Fixes Minor Fields in Docs 2025-06-21 18:26:14 -03:00
ac239c2617 Adds Docs for AllOf and AnyOf 2025-06-21 18:20:44 -03:00
dee8b02d26 Adds Docs for Ref Type 2025-06-21 11:46:47 -03:00
12471ac804 Adds Docs for Object 2025-06-21 08:39:01 -03:00
b92cf37145 Adds Docs for Array, Bool and Numeric 2025-06-20 23:12:33 -03:00
249195ff26 Finalizes String Doc 2025-06-20 22:54:24 -03:00
c504efe23b Initial Work on Documentation 2025-06-19 23:51:33 -03:00
040ffcba66 Merge pull request #20 from HideyoshiNakazone/feature/ref-type-parser
[FEATURE] Implementation of $ref JSON Schema Keyword
2025-06-19 22:09:11 -03:00
58d4cd9707 Adds Feature Example of the New Feature to the ReadMe 2025-06-19 22:03:28 -03:00
607555898e Final and Tested Version of Ref 2025-06-19 00:39:54 -03:00
37cf59078e Working Version of Root Level Reference 2025-06-13 01:52:20 -03:00
f4effac41c Initial Working $ref Keyword with: ForwardRef, Partial Root Ref and Recursive Ref 2025-06-13 01:36:16 -03:00
188cd28586 **BROKEN INITIAL FORWARDREF** 2025-06-12 02:35:09 -03:00
760f30d08f Initial Implementation of $ref 2025-06-12 01:54:52 -03:00
129114a85f Merge pull request #19 from HideyoshiNakazone/project/fixes-feature-request-issue-template
Update issue templates
2025-06-12 00:53:38 -03:00
3e7d796ef7 Update issue templates 2025-06-12 00:53:28 -03:00
fd967cf6fe Merge pull request #17 from HideyoshiNakazone/project/adds-issue-template
Update issue templates
2025-06-12 00:45:39 -03:00
21c4e4ab75 Update issue templates 2025-06-12 00:45:14 -03:00
cbef7104c4 Merge pull request #16 from HideyoshiNakazone/improvement/better-internal-structure
Better Object Internal Structure and Type Selection
2025-06-04 01:27:29 -03:00
dbbb8e0419 Fixes Tests 2025-06-04 01:26:06 -03:00
4bbb896c46 Fixes Default Values in StringTypeParser 2025-06-04 01:12:45 -03:00
3273fd84bf Fixes Test and Reports 2025-06-03 03:00:49 -03:00
782e09d5e3 Adds Test to SchemaConverter.build Schema Validation 2025-06-03 02:35:25 -03:00
66ca341bb2 Adds Test to AllOf 2025-06-03 02:31:13 -03:00
25d8e68e95 Fixes Test and Reports 2025-06-03 02:20:15 -03:00
be7f04e20d Better TypeParser Kwargs 2025-06-03 02:05:21 -03:00
2b2c823e27 Fixes Test of AllOf 2025-06-03 00:49:54 -03:00
e37e9818ed Initial Work on TypeParser Kwargs 2025-06-03 00:48:22 -03:00
bef42e4cdb Better Object Internal Structure and Type Selection 2025-06-03 00:15:19 -03:00
894969332d Merge pull request #15 from HideyoshiNakazone/fix/better-typing-output
Fixes Typing Output
2025-06-02 20:43:13 -03:00
9e52783b22 Fixes Typing Output 2025-06-02 20:41:50 -03:00
393eaa5e0a Merge pull request #12 from PuChenTW/main
feat(parser): first‑class support for JSON string.format
2025-05-10 20:09:14 -03:00
b9c36a46b4 Merge pull request #13 from HideyoshiNakazone/adds-test-execution-pr
Adds PRs to the Test Execution GithubAction
2025-05-10 20:06:40 -03:00
db3d0eee45 Adds PRs to the Test Execution GithubAction 2025-05-10 20:05:26 -03:00
Pu Chen
b52997633c Support string format 2025-05-06 22:52:08 +08:00
Pu Chen
7a3266e4cc Install email-validator 2025-05-06 21:54:02 +08:00
cba4ef0e21 Merge pull request #11 from HideyoshiNakazone/any-all-ref-implementation
Implements: allOf, anyOf

Finalizes the implementation of allOf and anyOf, but the implementation of oneOf was cancelled for the time being
2025-04-19 17:32:58 -03:00
f9f986e3c8 Fixes Minor Element in AnyOf Test 2025-04-19 17:30:11 -03:00
1c546d252f Omits Test Dir in Test Coverage 2025-04-19 17:26:33 -03:00
b409ce49a5 Fixes Validation of JsonSchema 2025-04-19 17:23:38 -03:00
863494ab9c Finalizes AnyOfTypeParser Tests 2025-04-19 16:57:56 -03:00
509ee60b75 Fixes Import Order jambo.parser 2025-04-19 16:51:27 -03:00
20e4a69968 Move Aux Function to the GenericTypeParser Class 2025-04-19 16:45:32 -03:00
d74e700233 Removes Unnecessary Case from ArrayParser 2025-04-19 15:48:54 -03:00
42bc0148b8 Adds Test for Boolean Default Value 2025-04-19 15:46:37 -03:00
c6a37dab74 Better Defaults Validation Implementation 2025-04-19 15:44:27 -03:00
5c3d3a39ba Implements Feature Complete AnyOf Keyword 2025-04-19 15:23:22 -03:00
5fdb4fa724 Removes OneOf due to complexity and niche use case
After further analysis, the functionality was deemed too complex to implement for such a niche use case and will therefore be removed from the implementation backlog
2025-04-17 16:06:55 -03:00
dc350aaa8b Adds Test for AllOfTypeParser Case 2025-04-17 03:07:08 -03:00
d5149061a1 Formats Import Orders 2025-04-17 03:04:38 -03:00
459d9da0b9 Final Implementation of AllOf Keyword 2025-04-17 03:03:22 -03:00
6d1febbcc1 Initial allOf Implementation 2025-04-14 03:22:42 -03:00
eb501fec74 Merge pull request #10 from HideyoshiNakazone/implements-object-default
Implements Object Defaults
2025-04-13 02:48:42 -03:00
7272b1a74b Implements Object Defaults 2025-04-13 02:40:07 -03:00
62f3f9b1c5 Merge pull request #9 from HideyoshiNakazone/better-tests
Implements Better Tests
2025-04-13 02:15:08 -03:00
af0a69ed35 Implements Better Object Tests 2025-04-13 02:13:01 -03:00
970aa50845 Implements Better Tests For: Int, Float, Bool 2025-04-13 01:45:28 -03:00
76b40847ce Implements Better String Tests 2025-04-12 19:37:53 -03:00
ec9171ba8f Implements Better Array Tests 2025-04-12 03:49:50 -03:00
4f68c49658 Merge pull request #8 from HideyoshiNakazone/codecov-report
Codecov report
2025-04-11 23:58:13 -03:00
22677e9811 Merge remote-tracking branch 'origin/main' into codecov-report 2025-04-11 23:57:03 -03:00
e8321f7d94 Adds Codecov Badge to README 2025-04-11 23:56:51 -03:00
470f322ff5 Merge pull request #7 from HideyoshiNakazone/codecov-report
Adds Codecov
2025-04-11 23:52:37 -03:00
21e64be29b Adds Codecov 2025-04-11 23:49:47 -03:00
df1df0daab Merge pull request #6 from HideyoshiNakazone/adds-description
Adds Description
2025-04-11 21:59:14 -03:00
e803e39a92 Adds Description 2025-04-11 21:55:32 -03:00
78 changed files with 8237 additions and 850 deletions

15
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1,15 @@
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: hideyoshinakazone
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
polar: # Replace with a single Polar username
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
thanks_dev: # Replace with a single thanks.dev username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

25
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
View File

@@ -0,0 +1,25 @@
---
name: Bug report
about: Create a report to help us improve
title: "[BUG] Title Here"
labels: bug
assignees: HideyoshiNakazone
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior
**Expected behavior**
A clear and concise description of what you expected to happen.
**Environment Information:**
- Python:
- Jambo Version:
- Pydantic:
**Additional context**
Add any other context about the problem here.

View File

@@ -0,0 +1,16 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[FEATURE REQUEST] Title Here"
labels: enhancement
assignees: HideyoshiNakazone
---
**Is this a [Json Schema](https://json-schema.org/specification) Keyword that is missing?** [yes|no]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Additional context**
Add any other context or screenshots about the feature request here.

View File

@@ -2,7 +2,10 @@ name: Test and Publish
on: on:
push push:
pull_request:
branches:
- main
permissions: permissions:
contents: read contents: read
@@ -11,6 +14,8 @@ jobs:
test: test:
name: run-tests name: run-tests
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: github.event_name != 'pull_request' ||
github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name
strategy: strategy:
matrix: matrix:
python-version: python-version:
@@ -35,7 +40,18 @@ jobs:
run: uv sync --all-extras --dev run: uv sync --all-extras --dev
- name: Run tests - name: Run tests
run: uv run poe tests run: |
uv run poe tests
uv run poe tests-report
- name: Static type check
run: uv run poe type-check
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
if: matrix.python-version == '3.10'
publish: publish:
name: publish name: publish

22
.readthedocs.yaml Normal file
View File

@@ -0,0 +1,22 @@
version: 2
# Specify os and python version
build:
os: "ubuntu-24.04"
tools:
python: "3.12"
jobs:
create_environment:
- asdf plugin add uv
- asdf install uv latest
- asdf global uv latest
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv sync --all-extras
install:
- "true"
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/source/conf.py
# Optionally build your docs in additional formats such as PDF and ePub
formats: all

121
README.md
View File

@@ -1,10 +1,13 @@
# Jambo - JSON Schema to Pydantic Converter # Jambo - JSON Schema to Pydantic Converter
<p align="center"> <p style="text-align:center">
<a href="https://github.com/HideyoshiNakazone/jambo" target="_blank"> <a href="https://github.com/HideyoshiNakazone/jambo" target="_blank">
<img src="https://img.shields.io/github/last-commit/HideyoshiNakazone/jambo.svg"> <img src="https://img.shields.io/github/last-commit/HideyoshiNakazone/jambo.svg" alt="Last commit">
<img src="https://github.com/HideyoshiNakazone/jambo/actions/workflows/build.yml/badge.svg" alt="Tests"> <img src="https://github.com/HideyoshiNakazone/jambo/actions/workflows/build.yml/badge.svg" alt="Tests">
</a> </a>
<a href="https://codecov.io/gh/HideyoshiNakazone/jambo" target="_blank">
<img src="https://codecov.io/gh/HideyoshiNakazone/jambo/branch/main/graph/badge.svg" alt="Coverage">
</a>
<br /> <br />
<a href="https://pypi.org/project/jambo" target="_blank"> <a href="https://pypi.org/project/jambo" target="_blank">
<img src="https://badge.fury.io/py/jambo.svg" alt="Package version"> <img src="https://badge.fury.io/py/jambo.svg" alt="Package version">
@@ -16,18 +19,31 @@
</p> </p>
**Jambo** is a Python package that automatically converts [JSON Schema](https://json-schema.org/) definitions into [Pydantic](https://docs.pydantic.dev/) models. **Jambo** is a Python package that automatically converts [JSON Schema](https://json-schema.org/) definitions into [Pydantic](https://docs.pydantic.dev/) models.
It's designed to streamline schema validation and enforce type safety using Pydantic's powerful validation features. It's designed to streamline schema validation and enforce type safety using Pydantic's validation features.
Created to simplifying the process of dynamically generating Pydantic models for AI frameworks like [LangChain](https://www.langchain.com/), [CrewAI](https://www.crewai.com/), and others. Created to simplify the process of dynamically generating Pydantic models for AI frameworks like [LangChain](https://www.langchain.com/), [CrewAI](https://www.crewai.com/), and others.
--- ---
## ✨ Features ## ✨ Features
- ✅ Convert JSON Schema into Pydantic models dynamically - ✅ Convert JSON Schema into Pydantic models dynamically;
- 🔒 Supports validation for strings, integers, floats, booleans, arrays, and nested objects - 🔒 Supports validation for:
- ⚙️ Enforces constraints like `minLength`, `maxLength`, `pattern`, `minimum`, `maximum`, `uniqueItems`, and more - strings
- 📦 Zero config — just pass your schema and get a model - integers
- floats
- booleans
- arrays
- nested objects
- allOf
- anyOf
- oneOf
- ref
- enum
- const
- ⚙️ Enforces constraints like `minLength`, `maxLength`, `pattern`, `minimum`, `maximum`, `uniqueItems`, and more;
- 📦 Zero config — just pass your schema and get a model.
--- ---
@@ -41,8 +57,18 @@ pip install jambo
## 🚀 Usage ## 🚀 Usage
There are two ways to build models with Jambo:
1. The original static API: `SchemaConverter.build(schema)` doesn't persist any reference cache between calls and doesn't require any configuration.
2. The new instance API: use a `SchemaConverter()` instance and call `build_with_cache`, which exposes and persists a reference cache and helper methods.
The instance API is useful when you want to reuse generated subtypes, inspect cached models, or share caches between converters; all leveraging namespaces via the `$id` property in JSON Schema. See the docs for full details: https://jambo.readthedocs.io/en/latest/usage.ref_cache.html
### Static (compatibility) example
```python ```python
from jambo.schema_converter import SchemaConverter from jambo import SchemaConverter
schema = { schema = {
"title": "Person", "title": "Person",
@@ -54,19 +80,52 @@ schema = {
"required": ["name"], "required": ["name"],
} }
# Old-style convenience API (kept for compatibility)
Person = SchemaConverter.build(schema) Person = SchemaConverter.build(schema)
obj = Person(name="Alice", age=30) obj = Person(name="Alice", age=30)
print(obj) print(obj)
``` ```
### Instance API (recommended for cache control)
```python
from jambo import SchemaConverter
converter = SchemaConverter()
schema = {
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"address": {"type": "object", "properties": {"street": {"type": "string"}}},
},
"required": ["name"],
}
# build_with_cache populates the converter's instance-level ref cache
Person = converter.build_with_cache(schema)
# you can retrieve cached subtypes by name/path
cached_person = converter.get_cached_ref("Person")
# clear the instance cache when needed
converter.clear_ref_cache()
```
--- ---
## ✅ Example Validations ## ✅ Example Validations
Following are some examples of how to use Jambo to create Pydantic models with various JSON Schema features, but for more information, please refer to the [documentation](https://jambo.readthedocs.io/).
### Strings with constraints ### Strings with constraints
```python ```python
from jambo import SchemaConverter
schema = { schema = {
"title": "EmailExample", "title": "EmailExample",
"type": "object", "type": "object",
@@ -89,6 +148,9 @@ print(obj)
### Integers with bounds ### Integers with bounds
```python ```python
from jambo import SchemaConverter
schema = { schema = {
"title": "AgeExample", "title": "AgeExample",
"type": "object", "type": "object",
@@ -106,6 +168,9 @@ print(obj)
### Nested Objects ### Nested Objects
```python ```python
from jambo import SchemaConverter
schema = { schema = {
"title": "NestedObjectExample", "title": "NestedObjectExample",
"type": "object", "type": "object",
@@ -127,6 +192,41 @@ obj = Model(address={"street": "Main St", "city": "Gotham"})
print(obj) print(obj)
``` ```
### References
```python
from jambo import SchemaConverter
schema = {
"title": "person",
"$ref": "#/$defs/person",
"$defs": {
"person": {
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"emergency_contact": {
"$ref": "#/$defs/person",
},
},
}
},
}
model = SchemaConverter.build(schema)
obj = model(
name="John",
age=30,
emergency_contact=model(
name="Jane",
age=28,
),
)
```
--- ---
## 🧪 Running Tests ## 🧪 Running Tests
@@ -167,9 +267,6 @@ poe create-hooks
## 📌 Roadmap / TODO ## 📌 Roadmap / TODO
- [ ] Support for `enum` and `const`
- [ ] Support for `anyOf`, `allOf`, `oneOf`
- [ ] Schema ref (`$ref`) resolution
- [ ] Better error reporting for unsupported schema types - [ ] Better error reporting for unsupported schema types
--- ---

29
docs/Makefile Normal file
View File

@@ -0,0 +1,29 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SPHINXAPIDOC ?= sphinx-apidoc
SOURCEDIR = source
BUILDDIR = build
SCANEDDIR = ../jambo
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
rescan:
$(SPHINXAPIDOC) -f -o $(SOURCEDIR) $(SCANEDDIR) $(EXCLUDEDIR)
clean:
rm -rf $(BUILDDIR)/*
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

35
docs/make.bat Normal file
View File

@@ -0,0 +1,35 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org/
exit /b 1
)
if "%1" == "" goto help
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd

41
docs/source/conf.py Normal file
View File

@@ -0,0 +1,41 @@
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = "jambo"
copyright = "2025, Vitor Hideyoshi"
author = "Vitor Hideyoshi"
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
extensions = [
"sphinx.ext.todo",
"sphinx.ext.viewcode",
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"sphinx_autodoc_typehints", # <-- needed
]
templates_path = ["_templates"]
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = "sphinx_rtd_theme"
html_static_path = ["_static"]
# -- Options for autodoc -----------------------------------------------------
add_module_names = False
python_use_unqualified_type_names = True
autodoc_typehints = "both"

33
docs/source/index.rst Normal file
View File

@@ -0,0 +1,33 @@
.. jambo documentation master file, created by
sphinx-quickstart on Thu Jun 19 22:20:35 2025.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Jambo - JSON Schema to Pydantic Converter
=========================================
This is the documentation for Jambo, a tool that converts JSON Schema definitions into Pydantic models.
Welcome to Jambo's documentation!
Jambo is a Python package that automatically converts JSON Schema definitions into Pydantic models. It's designed to streamline schema validation and enforce type safety using Pydantic's powerful validation features.
Created to simplify the process of dynamically generating Pydantic models for AI frameworks like LangChain, CrewAI, and others.
Installation
------------------
You can install Jambo using pip:
.. code-block:: bash
pip install jambo
.. toctree::
:maxdepth: 2
:caption: Contents:
usage
modules

View File

@@ -0,0 +1,37 @@
jambo.exceptions package
========================
Submodules
----------
jambo.exceptions.internal\_assertion\_exception module
------------------------------------------------------
.. automodule:: jambo.exceptions.internal_assertion_exception
:members:
:show-inheritance:
:undoc-members:
jambo.exceptions.invalid\_schema\_exception module
--------------------------------------------------
.. automodule:: jambo.exceptions.invalid_schema_exception
:members:
:show-inheritance:
:undoc-members:
jambo.exceptions.unsupported\_schema\_exception module
------------------------------------------------------
.. automodule:: jambo.exceptions.unsupported_schema_exception
:members:
:show-inheritance:
:undoc-members:
Module contents
---------------
.. automodule:: jambo.exceptions
:members:
:show-inheritance:
:undoc-members:

View File

@@ -0,0 +1,117 @@
jambo.parser package
====================
Submodules
----------
jambo.parser.allof\_type\_parser module
---------------------------------------
.. automodule:: jambo.parser.allof_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.anyof\_type\_parser module
---------------------------------------
.. automodule:: jambo.parser.anyof_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.array\_type\_parser module
---------------------------------------
.. automodule:: jambo.parser.array_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.boolean\_type\_parser module
-----------------------------------------
.. automodule:: jambo.parser.boolean_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.const\_type\_parser module
---------------------------------------
.. automodule:: jambo.parser.const_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.enum\_type\_parser module
--------------------------------------
.. automodule:: jambo.parser.enum_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.float\_type\_parser module
---------------------------------------
.. automodule:: jambo.parser.float_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.int\_type\_parser module
-------------------------------------
.. automodule:: jambo.parser.int_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.null\_type\_parser module
--------------------------------------
.. automodule:: jambo.parser.null_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.object\_type\_parser module
----------------------------------------
.. automodule:: jambo.parser.object_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.oneof\_type\_parser module
---------------------------------------
.. automodule:: jambo.parser.oneof_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.ref\_type\_parser module
-------------------------------------
.. automodule:: jambo.parser.ref_type_parser
:members:
:show-inheritance:
:undoc-members:
jambo.parser.string\_type\_parser module
----------------------------------------
.. automodule:: jambo.parser.string_type_parser
:members:
:show-inheritance:
:undoc-members:
Module contents
---------------
.. automodule:: jambo.parser
:members:
:show-inheritance:
:undoc-members:

31
docs/source/jambo.rst Normal file
View File

@@ -0,0 +1,31 @@
jambo package
=============
Subpackages
-----------
.. toctree::
:maxdepth: 4
jambo.exceptions
jambo.parser
jambo.types
Submodules
----------
jambo.schema\_converter module
------------------------------
.. automodule:: jambo.schema_converter
:members:
:show-inheritance:
:undoc-members:
Module contents
---------------
.. automodule:: jambo
:members:
:show-inheritance:
:undoc-members:

View File

@@ -0,0 +1,29 @@
jambo.types package
===================
Submodules
----------
jambo.types.json\_schema\_type module
-------------------------------------
.. automodule:: jambo.types.json_schema_type
:members:
:show-inheritance:
:undoc-members:
jambo.types.type\_parser\_options module
----------------------------------------
.. automodule:: jambo.types.type_parser_options
:members:
:show-inheritance:
:undoc-members:
Module contents
---------------
.. automodule:: jambo.types
:members:
:show-inheritance:
:undoc-members:

7
docs/source/modules.rst Normal file
View File

@@ -0,0 +1,7 @@
jambo
=====
.. toctree::
:maxdepth: 4
jambo

View File

@@ -0,0 +1,39 @@
AllOf Type
=================
The AllOf type is used to combine multiple schemas into a single schema. It allows you to specify that an object must conform to all of the specified schemas.
Examples
-----------------
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "Person",
"description": "A person",
"type": "object",
"properties": {
"name": {
"allOf": [
{"type": "string", "maxLength": 11},
{"type": "string", "maxLength": 4},
{"type": "string", "minLength": 1},
{"type": "string", "minLength": 2},
]
},
},
}
Model = SchemaConverter.build(schema)
obj = Model(name="J")
print(obj) # Output: Person(name='J')
try:
obj = Model(name="") # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e) # Output: Validation fails as expected: 1 validation error for Person

View File

@@ -0,0 +1,41 @@
AnyOf Type
=================
The AnyOf type is used to specify that an object can conform to any one of the specified schemas. It allows for flexibility in the structure of the data, as it can match multiple possible schemas.
Examples
-----------------
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "Person",
"description": "A person",
"type": "object",
"properties": {
"id": {
"anyOf": [
{"type": "integer"},
{"type": "string"},
]
},
},
}
Model = SchemaConverter.build(schema)
obj1 = Model(id="1")
print(obj1) # Output: Person(id='1')
obj2 = Model(id=1)
print(obj2) # Output: Person(id=1)
try:
obj3 = Model(id=1.1)  # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e) # Output: Validation fails as expected: 1 validation error for Person

View File

@@ -0,0 +1,86 @@
Array Type
=================
The Array type has the following required properties:
- items: Schema for the items in the array, which can be a type or a schema object.
And the additional supported properties:
- maxItems: Maximum number of items in the array.
- minItems: Minimum number of items in the array.
- uniqueItems: If true, all items in the array must be unique.
And the additional generic properties:
- default: Default value for the array.
- description: Description of the array field.
Examples
-----------------
1. Basic Array with maxItems and minItems:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "ArrayExample",
"type": "object",
"properties": {
"tags": {
"type": "array",
"items": {"type": "string"},
"minItems": 1,
"maxItems": 5,
},
},
"required": ["tags"],
}
Model = SchemaConverter.build(schema)
obj = Model(tags=["python", "jambo", "pydantic"])
print(obj) # Output: ArrayExample(tags=['python', 'jambo', 'pydantic'])
try:
obj = Model(tags=[]) # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e) # Output: Validation fails as expected: 1 validation error for ArrayExample
2. Array with uniqueItems:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "UniqueArrayExample",
"type": "object",
"properties": {
"unique_tags": {
"type": "array",
"items": {"type": "string"},
"uniqueItems": True,
},
},
"required": ["unique_tags"],
}
Model = SchemaConverter.build(schema)
obj = Model(unique_tags=["python", "jambo", "pydantic"])
print(obj) # Output: UniqueArrayExample(unique_tags={'python', 'jambo', 'pydantic'})
try:
obj = Model(unique_tags=["python", "jambo", "python"]) # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e) # Output: Validation fails as expected: 1 validation error for UniqueArrayExample

View File

@@ -0,0 +1,34 @@
Bool Types
=================
The Bool type has no specific properties, it has only the generic properties:
- default: Default value for the boolean.
- description: Description of the boolean field.
Examples
-----------------
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "BoolExample",
"type": "object",
"properties": {
"is_active": {
"type": "boolean",
},
},
"required": ["is_active"],
}
Model = SchemaConverter.build(schema)
obj = Model(is_active=True)
print(obj) # Output: BoolExample(is_active=True)

View File

@@ -0,0 +1,40 @@
Const Type
=================
The const type is a special data type that allows a variable to be a single, fixed value.
It does not have the same properties as the other generic types, but it has the following specific properties:
- const: The fixed value that the variable must always hold.
- description: Description of the const field.
Examples
-----------------
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "Country",
"type": "object",
"properties": {
"name": {
"const": "United States of America",
}
},
"required": ["name"],
}
Model = SchemaConverter.build(schema)
obj = Model()
print(obj.name)  # Output: United States of America
try:
obj = Model(name="Canada")  # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e)  # Output: Validation fails as expected: 1 validation error for Country

View File

@@ -0,0 +1,37 @@
Enum Type
==================
An enum type is a special data type that enables a variable to be a set of predefined constants. The enum type is used to define variables that can only take one out of a small set of possible values.
It does not have any specific properties, but it has the generic properties:
- default: Default value for the enum.
- description: Description of the enum field.
Examples
-----------------
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "EnumExample",
"type": "object",
"properties": {
"status": {
"type": "string",
"enum": ["active", "inactive", "pending"],
"description": "The status of the object.",
"default": "active",
},
},
"required": ["status"],
}
Model = SchemaConverter.build(schema)
obj = Model(status="active")
print(obj) # Output: EnumExample(status=status.ACTIVE)

View File

@@ -0,0 +1,118 @@
Numeric Types
=================
The Numeric Types (integer, number) have the following supported properties:
- minimum: Minimum value for the number.
- maximum: Maximum value for the number.
- exclusiveMinimum: If true, the value must be greater than the minimum.
- exclusiveMaximum: If true, the value must be less than the maximum.
- multipleOf: The value must be a multiple of this number.
And the additional generic properties:
- default: Default value for the number.
- description: Description of the number field.
Examples
-----------------
1. Basic Integer with minimum and maximum:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "IntegerExample",
"type": "object",
"properties": {
"age": {
"type": "integer",
"minimum": 0,
"maximum": 120,
},
},
"required": ["age"],
}
Model = SchemaConverter.build(schema)
obj = Model(age=30)
print(obj) # Output: IntegerExample(age=30)
try:
obj = Model(age=-5) # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e) # Output: Validation fails as expected: 1 validation error for IntegerExample
2. Number with exclusiveMinimum and exclusiveMaximum:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "NumberExample",
"type": "object",
"properties": {
"price": {
"type": "number",
"exclusiveMinimum": 0,
"exclusiveMaximum": 1000,
},
},
"required": ["price"],
}
Model = SchemaConverter.build(schema)
obj = Model(price=1)
print(obj) # Output: NumberExample(price=1)
try:
obj = Model(price=0) # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e) # Output: Validation fails as expected: 1 validation error for NumberExample
obj = Model(price=999)
print(obj) # Output: NumberExample(price=999)
try:
obj = Model(price=1000) # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e) # Output: Validation fails as expected: 1 validation error for NumberExample
3. Number with multipleOf:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "MultipleOfExample",
"type": "object",
"properties": {
"quantity": {
"type": "number",
"multipleOf": 0.5,
},
},
"required": ["quantity"],
}
Model = SchemaConverter.build(schema)
obj = Model(quantity=2.5)
print(obj) # Output: MultipleOfExample(quantity=2.5)
try:
obj = Model(quantity=2.3) # This will raise a validation error
except ValueError as e:
print("Validation fails as expected:", e) # Output: Validation fails as expected: 1 validation error for MultipleOfExample

View File

@@ -0,0 +1,46 @@
Object Type
=================
The Object type has no specific properties, it has only the generic properties:
- default: Default value for the object.
- description: Description of the object field.
Examples
-----------------
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "Person",
"type": "object",
"properties": {
"address": {
"type": "object",
"properties": {
"street": {"type": "string"},
"city": {"type": "string"},
},
"default": {
"street": "Unknown Street",
"city": "Unknown City",
},
},
},
"description": "A person object containing a address.",
"required": ["address"],
}
Person = SchemaConverter.build(schema)
obj = Person.model_validate({ "address": {"street": "123 Main St", "city": "Springfield"} })
print(obj) # Output: Person(address=Address(street='123 Main St', city='Springfield'))
obj_default = Person() # Uses default values
print(obj_default) # Output: Person(address=Address(street='Unknown Street', city='Unknown City'))

112
docs/source/usage.oneof.rst Normal file
View File

@@ -0,0 +1,112 @@
OneOf Type
=================
The OneOf type is used to specify that an object must conform to exactly one of the specified schemas. Unlike AnyOf which allows matching multiple schemas, OneOf enforces that the data matches one and only one of the provided schemas.
Examples
-----------------
1. **Overlapping String Example** - A field that accepts strings with overlapping constraints:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "SimpleExample",
"type": "object",
"properties": {
"value": {
"oneOf": [
{"type": "string", "maxLength": 6},
{"type": "string", "minLength": 4}
]
}
},
"required": ["value"]
}
Model = SchemaConverter.build(schema)
# Valid: Short string (matches first schema only)
obj1 = Model(value="hi")
print(obj1.value) # Output: hi
# Valid: Long string (matches second schema only)
obj2 = Model(value="very long string")
print(obj2.value) # Output: very long string
# Invalid: Medium string (matches BOTH schemas - violates oneOf)
try:
obj3 = Model(value="hello") # 5 chars: matches maxLength=6 AND minLength=4
except ValueError as e:
print("Validation fails as expected:", e)
2. **Discriminator Example** - Different shapes with a type field:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "Shape",
"type": "object",
"properties": {
"shape": {
"oneOf": [
{
"type": "object",
"properties": {
"type": {"const": "circle"},
"radius": {"type": "number", "minimum": 0}
},
"required": ["type", "radius"]
},
{
"type": "object",
"properties": {
"type": {"const": "rectangle"},
"width": {"type": "number", "minimum": 0},
"height": {"type": "number", "minimum": 0}
},
"required": ["type", "width", "height"]
}
],
"discriminator": {
"propertyName": "type"
}
}
},
"required": ["shape"]
}
Model = SchemaConverter.build(schema)
# Valid: Circle
circle = Model(shape={"type": "circle", "radius": 5.0})
print(circle.shape.type) # Output: circle
# Valid: Rectangle
rectangle = Model(shape={"type": "rectangle", "width": 10, "height": 20})
print(rectangle.shape.type) # Output: rectangle
# Invalid: Wrong properties for the type
try:
invalid = Model(shape={"type": "circle", "width": 10})
except ValueError as e:
print("Validation fails as expected:", e)
.. note::
OneOf ensures exactly one schema matches. The discriminator helps Pydantic efficiently determine which schema to use based on a specific property value.
.. warning::
If your data could match multiple schemas in a oneOf, validation will fail. Ensure schemas are mutually exclusive.
.. warning::
The discriminator feature is not officially in the JSON Schema specification, it was introduced by OpenAPI. Use it with caution and ensure it fits your use case.

View File

@@ -0,0 +1,348 @@
===============
Reference Cache
===============
The reference cache is named after the mechanism used to implement
the `$ref` keyword in the JSON Schema specification.
Internally, the cache is used by both :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>`
and :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>`.
However, only :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` exposes the cache through a supported API;
:py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>` uses the cache internally and does not provide access to it.
The reference cache accepts a mutable mapping (typically a plain Python dict)
that maps reference names (strings) to generated Pydantic model classes.
Since only the reference names are stored it can cause name collisions if
multiple schemas with overlapping names are processed using the same cache.
Therefore, it's recommended that each namespace or schema source uses its own
:class:`SchemaConverter` instance.
-----------------------------------------
Configuring and Using the Reference Cache
-----------------------------------------
The reference cache can be used in three ways:
* Without a persistent reference cache (no sharing between calls).
* Passing an explicit ``ref_cache`` dictionary to a call.
* Using the converter instance's default cache (the instance-level cache).
Usage Without Reference Cache
=============================
When you run the library without a persistent reference cache, the generated
types are not stored for reuse. Each call to a build method creates fresh
Pydantic model classes (they will have different Python object identities).
Because nothing is cached, you cannot look up generated subtypes later.
This is the default behaviour of :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>`.
You can achieve the same behaviour with :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` by
passing ``without_cache=True``.
Usage: Manually Passing a Reference Cache
=========================================
You can create and pass your own mutable mapping (typically a plain dict)
as the reference cache. This gives you full control over sharing and
lifetime of cached types. When two converters share the same dict, types
created by one converter will be reused by the other.
.. code-block:: python
from jambo import SchemaConverter
# a shared cache you control
shared_cache = {}
converter1 = SchemaConverter(shared_cache)
converter2 = SchemaConverter(shared_cache)
model1 = converter1.build_with_cache(schema)
model2 = converter2.build_with_cache(schema)
# Because both converters use the same cache object, the built models are the same object
assert model1 is model2
If you prefer a per-call cache (leaving the converter's instance cache unchanged), pass the ``ref_cache`` parameter to
:py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>`:
.. code-block:: python
# pass an explicit, private cache for this call only
model_a = converter1.build_with_cache(schema, ref_cache={})
model_b = converter1.build_with_cache(schema, ref_cache={})
# because each call received a fresh dict, the resulting model classes are distinct
assert model_a is not model_b
Usage: Using the Instance Default (Instance-level) Cache
========================================================
By default, a :class:`SchemaConverter` instance creates and keeps an internal
reference cache (a plain dict). Reusing the same converter instance across
multiple calls will reuse that cache and therefore reuse previously generated
model classes.
That cache is isolated per namespace via the `$id` property in JSON Schema, so
schemas with different `$id` values will not collide in the same cache.
.. code-block:: python
from jambo import SchemaConverter
# no $id in this example, therefore a default namespace is used
schema = {
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"address": {
"type": "object",
"properties": {
"street": {"type": "string"},
"city": {"type": "string"},
},
"required": ["street", "city"],
},
},
"required": ["name", "address"],
}
converter = SchemaConverter() # has its own internal cache
model1 = converter.build_with_cache(schema)
model2 = converter.build_with_cache(schema)
# model1 and model2 are the same object because the instance cache persisted
assert model1 is model2
When passing a schema with a different `$id`, the instance cache keeps types
separate:
.. code-block:: python
schema_a = {
"$id": "namespace_a",
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
},
"required": ["name"],
}
schema_b = {
"$id": "namespace_b",
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
},
"required": ["name"],
}
converter = SchemaConverter() # has its own internal cache
model_a = converter.build_with_cache(schema_a)
model_b = converter.build_with_cache(schema_b)
# different $id values isolate the types in the same cache
assert model_a is not model_b
If you want to temporarily avoid using the instance cache for a single call,
use ``without_cache=True``. That causes :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` to
use a fresh, empty cache for the duration of that call only:
.. code-block:: python
model1 = converter.build_with_cache(schema, without_cache=True)
model2 = converter.build_with_cache(schema, without_cache=True)
# each call used a fresh cache, so the models are distinct
assert model1 is not model2
Inspecting and Managing the Cache
=================================
The converter provides a small, explicit API to inspect and manage the
instance cache.
Retrieving cached types
-----------------------
:py:meth:`SchemaConverter.get_cached_ref <jambo.SchemaConverter.get_cached_ref>`(name, namespace="default") — returns a cached model class or ``None``.
Retrieving the root type of the schema
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When retrieving the root type of a schema, pass the schema's ``title`` property as the name.
.. code-block:: python
from jambo import SchemaConverter
converter = SchemaConverter()
schema = {
"title": "person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
},
}
person_model = converter.build_with_cache(schema)
cached_person_model = converter.get_cached_ref("person")
Retrieving a subtype
~~~~~~~~~~~~~~~~~~~~
When retrieving a subtype, pass a path string (for example, ``parent_name.field_name``) as the name.
.. code-block:: python
from jambo import SchemaConverter
converter = SchemaConverter()
schema = {
"title": "person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"address": {
"type": "object",
"properties": {
"street": {"type": "string"},
"city": {"type": "string"},
},
"required": ["street", "city"],
},
}
}
person_model = converter.build_with_cache(schema)
cached_address_model = converter.get_cached_ref("person.address")
Retrieving a type from ``$defs``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When retrieving a type defined in ``$defs``, access it directly by its name.
.. code-block:: python
from jambo import SchemaConverter
converter = SchemaConverter()
schema = {
"title": "person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"address": {"$ref": "#/$defs/address"},
},
"$defs": {
"address": {
"type": "object",
"properties": {
"street": {"type": "string"},
"city": {"type": "string"},
},
"required": ["street", "city"],
}
},
}
person_model = converter.build_with_cache(schema)
cached_address_model = converter.get_cached_ref("address")
Isolation by Namespace
~~~~~~~~~~~~~~~~~~~~~~
The instance cache is isolated per namespace via the `$id` property in JSON Schema.
When retrieving a cached type, you can specify the namespace to look in
(via the ``namespace`` parameter). By default, the ``default`` namespace is used.
.. code-block:: python
from jambo import SchemaConverter
converter = SchemaConverter()
schema_a = {
"$id": "namespace_a",
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
},
"required": ["name"],
}
schema_b = {
"$id": "namespace_b",
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
},
"required": ["name"],
}
person_a = converter.build_with_cache(schema_a)
person_b = converter.build_with_cache(schema_b)
cached_person_a = converter.get_cached_ref("Person", namespace="namespace_a")
cached_person_b = converter.get_cached_ref("Person", namespace="namespace_b")
assert cached_person_a is person_a
assert cached_person_b is person_b
Clearing the cache
------------------
:py:meth:`SchemaConverter.clear_ref_cache <jambo.SchemaConverter.clear_ref_cache>`(namespace: Optional[str]="default") — removes all entries from the instance cache.
When you want to clear the instance cache, use :py:meth:`SchemaConverter.clear_ref_cache <jambo.SchemaConverter.clear_ref_cache>`.
You can optionally specify a ``namespace`` to clear only that namespace;
otherwise, the default namespace is cleared.
If you want to clear all namespaces, call :py:meth:`SchemaConverter.clear_ref_cache <jambo.SchemaConverter.clear_ref_cache>` passing `None` as the namespace,
which removes all entries from all namespaces.
Notes and Behavioural Differences
=================================
* :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>` does not expose or persist an instance cache. If you call it without
providing a ``ref_cache`` it will create and use a temporary cache for that
call only; nothing from that call will be available later via
:py:meth:`SchemaConverter.get_cached_ref <jambo.SchemaConverter.get_cached_ref>`.
* :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` is the supported entry point when you want
cache control: it uses the instance cache by default, accepts an explicit
``ref_cache`` dict for per-call control, or uses ``without_cache=True`` to
run with an ephemeral cache.
References in the Test Suite
============================
These behaviours are exercised in the project's tests; see :mod:`tests.test_schema_converter`
for examples and additional usage notes.

View File

@@ -0,0 +1,85 @@
Reference Type
===================
The Reference type allows you to reference another schema by its `$ref` property. This is useful for reusing schemas across your application.
The Reference type has no specific properties, it has only the generic properties:
- default: Default value for the reference.
- description: Description of the reference field.
Examples
-----------------
1. Reference to the Root schema:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"emergency_contact": {
"$ref": "#"
}
},
"required": ["name"],
}
Model = SchemaConverter.build(schema)
obj = Model(name="Alice", age=30, emergency_contact=Model(name="Bob", age=25))
print(obj) # Output: Person(name='Alice', age=30, emergency_contact=Person(name='Bob', age=25))
2. Reference to a Def Schema:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"address": {
"$ref": "#/$defs/Address"
}
},
"required": ["name"],
"$defs": {
"Address": {
"type": "object",
"properties": {
"street": {"type": "string"},
"city": {"type": "string"},
},
"required": ["street", "city"],
}
},
}
Model = SchemaConverter.build(schema)
obj = Model(name="Alice", age=30, address={"street": "123 Main St", "city": "Springfield"})
print(obj) # Output: Person(name='Alice', age=30, address=Address(street='123 Main St', city='Springfield'))
.. note::
At the moment, Jambo doesn't have a way to expose the class definition :py:class:`Address` defined inside the `$defs` property,
but you can access the model class by using the `Model.__fields__` attribute to get the field definitions,
or by using the `Model.model_fields` property to get a dictionary of field names and their types.
.. warning::
The JSON Schema Reference specification allows for URI references,
but Jambo currently only supports root references (using the `#` symbol)
and def references (using the `$defs` property).

129
docs/source/usage.rst Normal file
View File

@@ -0,0 +1,129 @@
===================
Using Jambo
===================
Jambo is designed to be easy to use. It doesn't require complex setup or configuration when not needed, while providing more powerful instance methods when you do need control.
Below is an example of how to use Jambo to convert a JSON Schema into a Pydantic model.
-------------------------
Static Method (no config)
-------------------------
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"address": {
"type": "object",
"properties": {
"street": {"type": "string"},
"city": {"type": "string"},
},
"required": ["street", "city"],
},
},
"required": ["name", "address"],
}
Person = SchemaConverter.build(schema)
obj = Person(name="Alice", age=30)
print(obj)
# Output: Person(name='Alice', age=30)
The :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>` static method takes a JSON Schema dictionary and returns a Pydantic model class.
Note: the static ``build`` method was the original public API of this library. It creates and returns a model class for the provided schema but does not expose or persist an instance cache.
--------------------------------
Instance Method (with ref cache)
--------------------------------
.. code-block:: python
from jambo import SchemaConverter
converter = SchemaConverter()
schema = {
"title": "Person",
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
"address": {
"type": "object",
"properties": {
"street": {"type": "string"},
"city": {"type": "string"},
},
"required": ["street", "city"],
},
},
"required": ["name", "address"],
}
# The instance API (build_with_cache) populates the converter's instance-level reference cache
Person = converter.build_with_cache(schema)
obj = Person(name="Alice", age=30)
print(obj)
# Output: Person(name='Alice', age=30)
# When using the converter's built-in instance cache (no ref_cache passed to the call),
# all object types parsed during the build are stored and can be retrieved via get_cached_ref.
cached_person_model = converter.get_cached_ref("Person")
assert Person is cached_person_model # the cached class is the same object that was built
# A nested/subobject type can also be retrieved from the instance cache
cached_address_model = converter.get_cached_ref("Person.address")
The :py:meth:`SchemaConverter.build_with_cache <jambo.SchemaConverter.build_with_cache>` instance method was added after the
initial static API to make it easier to access and reuse subtypes defined in a schema.
Unlike the original static :py:meth:`SchemaConverter.build <jambo.SchemaConverter.build>`,
the instance method persists and exposes the reference cache and provides helpers such as
:py:meth:`SchemaConverter.get_cached_ref <jambo.SchemaConverter.get_cached_ref>` and
:py:meth:`SchemaConverter.clear_ref_cache <jambo.SchemaConverter.clear_ref_cache>`.
.. warning::
The instance API with reference cache can lead to schema and type name collisions if not managed carefully.
It's recommended that each schema defines its own unique namespace using the `$id` property in JSON Schema,
and then access its ref_cache by passing it explicitly when needed.
For details and examples about the reference cache and the different cache modes (instance cache, per-call cache, ephemeral cache), see:
.. toctree::
usage.ref_cache
Type System
-----------
For a full explanation of the supported schemas and types see our documentation on types:
.. toctree::
:maxdepth: 2
usage.string
usage.numeric
usage.bool
usage.array
usage.object
usage.reference
usage.allof
usage.anyof
usage.oneof
usage.enum
usage.const

View File

@@ -0,0 +1,107 @@
String Type
=================
The String type has the following supported properties:
- maxLength: Maximum length of the string.
- minLength: Minimum length of the string.
- pattern: Regular expression pattern that the string must match.
- format: A string format that can be used to validate the string (e.g., "email", "uri").
And the additional generic properties:
- default: Default value for the string.
- description: Description of the string field.
Examples
-----------------
1. Basic String with maxLength and minLength:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "StringExample",
"type": "object",
"properties": {
"attr1": {
"type": "string",
"minLength": 5,
"maxLength": 50,
},
},
"required": ["attr1"],
}
Model = SchemaConverter.build(schema)
obj = Model(attr1="this_is_a_valid_string")
print(obj)
# Output: StringExample(attr1='this_is_a_valid_string')
2. String with pattern and format:
Pattern example:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "StringExample",
"type": "object",
"properties": {
"email": {
"type": "string",
"pattern": r"^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$",
},
},
"required": ["email"],
}
Model = SchemaConverter.build(schema)
obj = Model(email="test@email.com")
print(obj)
# Output: StringExample(email='test@email.com')
try:
Model(email="invalid-email")
except ValueError as e:
print("Validation Failed as Expected") # Output: Validation Failed as Expected
Format example:
.. code-block:: python
from jambo import SchemaConverter
schema = {
"title": "StringExample",
"type": "object",
"properties": {
"email": {
"type": "string",
"format": "email",
},
},
"required": ["email"],
}
Model = SchemaConverter.build(schema)
obj = Model(email="test@email.com")
print(obj)
# Output: StringExample(email='test@email.com')
try:
Model(email="invalid-email")
except ValueError as e:
print("Validation Failed as Expected") # Output: Validation Failed as Expected

View File

@@ -0,0 +1,6 @@
from .schema_converter import SchemaConverter
__all__ = [
"SchemaConverter" # Exports the schema converter class for external use
]

View File

@@ -0,0 +1,10 @@
from .internal_assertion_exception import InternalAssertionException
from .invalid_schema_exception import InvalidSchemaException
from .unsupported_schema_exception import UnsupportedSchemaException
__all__ = [
"InternalAssertionException",
"InvalidSchemaException",
"UnsupportedSchemaException",
]

View File

@@ -0,0 +1,16 @@
class InternalAssertionException(RuntimeError):
    """Exception raised for internal assertions.

    Signals an invariant violation inside Jambo itself (i.e. a library bug,
    not a user error). The stored message is normalized so the
    "Internal Assertion Failed: " prefix is added exactly once by __str__.
    """

    def __init__(
        self,
        message: str,
    ) -> None:
        # Normalize message by stripping redundant prefix if present
        message = message.removeprefix("Internal Assertion Failed: ")
        super().__init__(message)

    def __str__(self) -> str:
        # Bug fix: the report instruction previously ended mid-sentence
        # ("Please report it at") — point users at the issue tracker.
        return (
            f"Internal Assertion Failed: {super().__str__()}\n"
            "This is likely a bug in Jambo. Please report it at "
            "https://github.com/HideyoshiNakazone/jambo/issues"
        )

View File

@@ -0,0 +1,27 @@
from typing_extensions import Optional
class InvalidSchemaException(ValueError):
    """Raised when a JSON schema is structurally invalid.

    Optionally records which field was invalid and/or the underlying
    exception that triggered the failure; both are reflected in __str__.
    """

    def __init__(
        self,
        message: str,
        invalid_field: Optional[str] = None,
        cause: Optional[BaseException] = None,
    ) -> None:
        self.invalid_field = invalid_field
        self.cause = cause
        # Strip a duplicated prefix so __str__ never repeats it.
        super().__init__(message.removeprefix("Invalid JSON Schema: "))

    def __str__(self) -> str:
        text = f"Invalid JSON Schema: {super().__str__()}"
        # The invalid field takes precedence over the underlying cause.
        if self.invalid_field:
            suffix = f" (invalid field: {self.invalid_field})"
        elif self.cause:
            suffix = f" (caused by {self.cause.__class__.__name__}: {self.cause})"
        else:
            suffix = ""
        return text + suffix

View File

@@ -0,0 +1,23 @@
from typing_extensions import Optional
class UnsupportedSchemaException(ValueError):
    """Raised when a JSON schema uses a feature Jambo does not support.

    Optionally records which field was unsupported; the cause is stored
    but only the field is included in the string representation.
    """

    def __init__(
        self,
        message: str,
        unsupported_field: Optional[str] = None,
        cause: Optional[BaseException] = None,
    ) -> None:
        self.unsupported_field = unsupported_field
        self.cause = cause
        # Strip a duplicated prefix so __str__ never repeats it.
        super().__init__(message.removeprefix("Unsupported JSON Schema: "))

    def __str__(self) -> str:
        text = f"Unsupported JSON Schema: {super().__str__()}"
        if not self.unsupported_field:
            return text
        return f"{text} (unsupported field: {self.unsupported_field})"

View File

@@ -1,10 +1,32 @@
# Exports generic type parser from ._type_parser import GenericTypeParser
from ._type_parser import GenericTypeParser as GenericTypeParser from .allof_type_parser import AllOfTypeParser
from .anyof_type_parser import AnyOfTypeParser
from .array_type_parser import ArrayTypeParser
from .boolean_type_parser import BooleanTypeParser
from .const_type_parser import ConstTypeParser
from .enum_type_parser import EnumTypeParser
from .float_type_parser import FloatTypeParser
from .int_type_parser import IntTypeParser
from .null_type_parser import NullTypeParser
from .object_type_parser import ObjectTypeParser
from .oneof_type_parser import OneOfTypeParser
from .ref_type_parser import RefTypeParser
from .string_type_parser import StringTypeParser
# Exports Implementations
from .int_type_parser import IntTypeParser as IntTypeParser __all__ = [
from .object_type_parser import ObjectTypeParser as ObjectTypeParser "GenericTypeParser",
from .string_type_parser import StringTypeParser as StringTypeParser "EnumTypeParser",
from .array_type_parser import ArrayTypeParser as ArrayTypeParser "ConstTypeParser",
from .boolean_type_parser import BooleanTypeParser as BooleanTypeParser "AllOfTypeParser",
from .float_type_parser import FloatTypeParser as FloatTypeParser "AnyOfTypeParser",
"ArrayTypeParser",
"BooleanTypeParser",
"FloatTypeParser",
"IntTypeParser",
"NullTypeParser",
"ObjectTypeParser",
"OneOfTypeParser",
"StringTypeParser",
"RefTypeParser",
]

View File

@@ -1,31 +1,185 @@
from jambo.exceptions import InvalidSchemaException
from jambo.types.type_parser_options import JSONSchema, TypeParserOptions
from pydantic import Field, TypeAdapter
from typing_extensions import Annotated, Any, ClassVar, Generic, Self, TypeVar, Unpack
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from typing import Generic, TypeVar
from typing_extensions import Self
from pydantic import Field
T = TypeVar("T") T = TypeVar("T", bound=type)
class GenericTypeParser(ABC, Generic[T]): class GenericTypeParser(ABC, Generic[T]):
@property json_schema_type: ClassVar[str]
@abstractmethod
def mapped_type(self) -> type[T]: ...
@property type_mappings: dict[str, str] = {}
@abstractmethod
def json_schema_type(self) -> str: ... default_mappings = {
"default": "default",
"description": "description",
"examples": "examples",
"title": "title",
"deprecated": "deprecated",
}
@staticmethod
@abstractmethod @abstractmethod
def from_properties_impl(
self, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
) -> tuple[T, dict]:
"""
Abstract method to convert properties to a type and its fields properties.
:param name: The name of the type.
:param properties: The properties of the type.
:param kwargs: Additional options for type parsing.
:return: A tuple containing the type and its properties.
"""
def from_properties( def from_properties(
name: str, properties: dict[str, any] self, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
) -> tuple[type[T], Field]: ... ) -> tuple[T, dict]:
"""
Converts properties to a type and its fields properties.
:param name: The name of the type.
:param properties: The properties of the type.
:param kwargs: Additional options for type parsing.
:return: A tuple containing the type and its properties.
"""
parsed_type, parsed_properties = self.from_properties_impl(
name, properties, **kwargs
)
if not self._validate_default(parsed_type, parsed_properties):
raise InvalidSchemaException(
"Default value is not valid", invalid_field=name
)
if not self._validate_examples(parsed_type, parsed_properties):
raise InvalidSchemaException(
"Examples values are not valid", invalid_field=name
)
return parsed_type, parsed_properties
@classmethod @classmethod
def get_impl(cls, type_name: str) -> Self: def type_from_properties(
cls, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
) -> tuple[type, dict]:
"""
Factory method to fetch the appropriate type parser based on properties
and generates the equivalent type and fields.
:param name: The name of the type to be created.
:param properties: The properties that define the type.
:param kwargs: Additional options for type parsing.
:return: A tuple containing the type and its properties.
"""
parser = cls._get_impl(cls._normalize_properties(properties))
return parser().from_properties(name=name, properties=properties, **kwargs)
@staticmethod
def _normalize_properties(properties: JSONSchema) -> JSONSchema:
"""
Normalizes the properties dictionary to ensure consistent structure.
:param properties: The properties to be normalized.
"""
type_value = properties.pop("type", None)
if isinstance(type_value, str):
properties["type"] = type_value
return properties
if isinstance(type_value, list) and len(type_value) == 0:
raise InvalidSchemaException(
"Invalid schema: 'type' list cannot be empty",
invalid_field=str(properties),
)
if isinstance(type_value, list) and len(type_value) == 1:
properties["type"] = type_value[0]
return properties
if isinstance(type_value, list):
properties["anyOf"] = [{"type": t} for t in type_value]
return properties
return properties
@classmethod
def _get_impl(cls, properties: JSONSchema) -> type[Self]:
for subcls in cls.__subclasses__(): for subcls in cls.__subclasses__():
if subcls.json_schema_type == type_name: schema_type, schema_value = subcls._get_schema_type()
if schema_type not in properties:
continue
if schema_value is None or schema_value == properties[schema_type]: # type: ignore
return subcls return subcls
raise ValueError(f"Unknown type: {type_name}") raise InvalidSchemaException(
"No suitable type parser found", invalid_field=str(properties)
)
@classmethod
def _get_schema_type(cls) -> tuple[str, str | None]:
if cls.json_schema_type is None:
raise RuntimeError(
f"TypeParser: json_schema_type not defined for subclass {cls.__name__}"
)
schema_definition = cls.json_schema_type.split(":")
if len(schema_definition) == 1:
return schema_definition[0], None
return schema_definition[0], schema_definition[1]
def mappings_properties_builder(
self, properties, **kwargs: Unpack[TypeParserOptions]
) -> dict[str, Any]:
if not kwargs.get("required", False):
properties["default"] = properties.get("default", None)
mappings = self.default_mappings | self.type_mappings
return {
mappings[key]: value for key, value in properties.items() if key in mappings
}
@staticmethod
def _validate_default(field_type: T, field_prop: dict) -> bool:
value = field_prop.get("default")
if value is None and field_prop.get("default_factory") is not None:
value = field_prop["default_factory"]()
if value is None:
return True
return GenericTypeParser._is_valid_value(field_type, field_prop, value)
@staticmethod
def _validate_examples(field_type: T, field_prop: dict) -> bool:
examples = field_prop.get("examples")
if examples is None:
return True
if not isinstance(examples, list):
return False
return all(
GenericTypeParser._is_valid_value(field_type, field_prop, e)
for e in examples
)
@staticmethod
def _is_valid_value(field_type: T, field_prop: dict, value: Any) -> bool:
try:
field = Annotated[field_type, Field(**field_prop)] # type: ignore
TypeAdapter(field).validate_python(value)
except Exception as _:
return False
return True

View File

@@ -0,0 +1,110 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser._type_parser import GenericTypeParser
from jambo.types.json_schema_type import JSONSchema
from jambo.types.type_parser_options import TypeParserOptions
from typing_extensions import Unpack
class AllOfTypeParser(GenericTypeParser):
    """Parses a JSON Schema ``allOf`` by merging its sub-schemas into one.

    All sub-schemas must resolve to the same underlying type parser; their
    properties are merged key-by-key and the combined schema is delegated
    to that parser.
    """

    # NOTE(review): `any` here is the builtin function, used only as a
    # sentinel — `allOf` has no single mapped Python type of its own.
    mapped_type = any
    json_schema_type = "allOf"

    def from_properties_impl(
        self, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
    ):
        """Merge the ``allOf`` branches and parse the combined schema.

        :param name: The name of the type being built.
        :param properties: Schema containing the ``allOf`` list.
        :param kwargs: Additional options forwarded to the resolved parser.
        :return: Tuple of (type, field properties) from the delegate parser.
        """
        sub_properties = properties.get("allOf", [])
        root_type = properties.get("type")
        # Propagate an explicit top-level "type" into every branch so all
        # branches resolve through the same parser (mutates the branch dicts).
        if root_type is not None:
            for sub_property in sub_properties:
                sub_property["type"] = root_type
        parser = self._get_type_parser(sub_properties)
        combined_properties = self._rebuild_properties_from_subproperties(
            sub_properties
        )
        # Top-level "examples" override anything merged from the branches.
        if (examples := properties.get("examples")) is not None:
            combined_properties["examples"] = examples
        return parser().from_properties_impl(name, combined_properties, **kwargs)

    @staticmethod
    def _get_type_parser(
        sub_properties: list[JSONSchema],
    ) -> type[GenericTypeParser]:
        """Resolve the single parser class shared by all sub-schemas.

        :raises InvalidSchemaException: if ``allOf`` is empty or the branches
            resolve to more than one parser type.
        """
        if not sub_properties:
            raise InvalidSchemaException(
                "'allOf' must contain at least one schema", invalid_field="allOf"
            )
        parsers: set[type[GenericTypeParser]] = set(
            GenericTypeParser._get_impl(sub_property) for sub_property in sub_properties
        )
        if len(parsers) != 1:
            raise InvalidSchemaException(
                "All sub-schemas in 'allOf' must resolve to the same type",
                invalid_field="allOf",
            )
        return parsers.pop()

    @staticmethod
    def _rebuild_properties_from_subproperties(
        sub_properties: list[JSONSchema],
    ) -> JSONSchema:
        """Fold all branch schemas into a single properties dict.

        First occurrence of a key wins outright; on repeats the values are
        merged via :py:meth:`_validate_prop`.
        """
        properties: JSONSchema = {}
        for subProperty in sub_properties:
            for name, prop in subProperty.items():
                if name not in properties:
                    properties[name] = prop  # type: ignore
                else:
                    # Merge properties if they exist in both sub-properties
                    properties[name] = AllOfTypeParser._validate_prop(  # type: ignore
                        name,
                        properties[name],  # type: ignore
                        prop,
                    )
        return properties

    @staticmethod
    def _validate_prop(prop_name, old_value, new_value):
        """Merge two values of the same schema key, per-key policy.

        descriptions are concatenated, defaults must agree, required lists
        are concatenated, numeric/length bounds keep one of the two values,
        nested "properties" dicts are merged recursively (mutating
        ``old_value`` in place); any other key keeps the first value seen.
        """
        if prop_name == "description":
            return f"{old_value} | {new_value}"
        if prop_name == "default":
            if old_value != new_value:
                raise InvalidSchemaException(
                    f"Conflicting defaults for '{prop_name}'", invalid_field=prop_name
                )
            return old_value
        if prop_name == "required":
            return old_value + new_value
        # NOTE(review): for `allOf` the intersection of constraints would be
        # the *smaller* maximum and *larger* minimum; this keeps the looser
        # bound instead — confirm whether this is intentional.
        if prop_name in ("maxLength", "maximum", "exclusiveMaximum"):
            return old_value if old_value > new_value else new_value
        if prop_name in ("minLength", "minimum", "exclusiveMinimum"):
            return old_value if old_value < new_value else new_value
        if prop_name == "properties":
            for key, value in new_value.items():
                if key not in old_value:
                    old_value[key] = value
                    continue
                for sub_key, sub_value in value.items():
                    if sub_key not in old_value[key]:
                        old_value[key][sub_key] = sub_value
                    else:
                        # Merge properties if they exist in both sub-properties
                        old_value[key][sub_key] = AllOfTypeParser._validate_prop(
                            sub_key, old_value[key][sub_key], sub_value
                        )
        # Handle other properties by just returning the first valued
        return old_value

View File

@@ -0,0 +1,53 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser._type_parser import GenericTypeParser
from jambo.types.type_parser_options import TypeParserOptions
from pydantic import Field
from typing_extensions import Annotated, Union, Unpack
class AnyOfTypeParser(GenericTypeParser):
    """Parses a JSON Schema ``anyOf`` into a Union of annotated types."""

    mapped_type = Union
    json_schema_type = "anyOf"

    def from_properties_impl(
        self, name, properties, **kwargs: Unpack[TypeParserOptions]
    ):
        """Build a ``Union`` type from the ``anyOf`` branches.

        :param name: The name of the type being built; branches are named
            ``{name}.sub{i}``.
        :param properties: Schema containing the ``anyOf`` list.
        :param kwargs: Additional options forwarded to the branch parsers.
        :return: Tuple of (Union type, mapped field properties).
        :raises InvalidSchemaException: if ``anyOf`` is missing or not a list.
        """
        if "anyOf" not in properties:
            raise InvalidSchemaException(
                f"AnyOf type {name} must have 'anyOf' property defined.",
                invalid_field="anyOf",
            )
        if not isinstance(properties["anyOf"], list):
            raise InvalidSchemaException(
                "AnyOf must be a list of types.", invalid_field="anyOf"
            )
        mapped_properties = self.mappings_properties_builder(properties, **kwargs)
        sub_properties = properties["anyOf"]
        # Each branch is parsed independently into a (type, properties) pair.
        sub_types = [
            GenericTypeParser.type_from_properties(
                f"{name}.sub{i}", subProperty, **kwargs
            )
            for i, subProperty in enumerate(sub_properties)
        ]
        # Optional fields default to None unless the schema supplied one.
        if not kwargs.get("required", False):
            mapped_properties["default"] = mapped_properties.get("default")
        # By defining the type as Union of Annotated type we can use the Field validator
        # to enforce the constraints of each union type when needed.
        # We use Annotated to attach the Field validators to the type.
        field_types = []
        for subType, subProp in sub_types:
            default_value = subProp.pop("default", None)
            if default_value is None:
                # Ellipsis marks the branch field as "no default" for pydantic.
                default_value = ...
            field_types.append(Annotated[subType, Field(default_value, **subProp)])
        return Union[(*field_types,)], mapped_properties

View File

@@ -1,67 +1,68 @@
import copy from jambo.exceptions import InvalidSchemaException
from jambo.parser._type_parser import GenericTypeParser from jambo.parser._type_parser import GenericTypeParser
from jambo.types.type_parser_options import TypeParserOptions
from typing import TypeVar from typing_extensions import (
Iterable,
from jambo.utils.properties_builder.mappings_properties_builder import ( Unpack,
mappings_properties_builder,
) )
V = TypeVar("V") import copy
class ArrayTypeParser(GenericTypeParser): class ArrayTypeParser(GenericTypeParser):
mapped_type = list mapped_type = list
json_schema_type = "array" json_schema_type = "type:array"
@classmethod type_mappings = {
def from_properties(cls, name, properties): "maxItems": "max_length",
_item_type, _item_args = GenericTypeParser.get_impl( "minItems": "min_length",
properties["items"]["type"] }
).from_properties(name, properties["items"])
_mappings = { def from_properties_impl(
"maxItems": "max_length", self, name, properties, **kwargs: Unpack[TypeParserOptions]
"minItems": "min_length", ):
} item_properties = kwargs.copy()
item_properties["required"] = True
wrapper_type = set if properties.get("uniqueItems", False) else list if (items := properties.get("items")) is None:
raise InvalidSchemaException(
f"Array type {name} must have 'items' property defined.",
invalid_field="items",
)
mapped_properties = mappings_properties_builder( _item_type, _item_args = GenericTypeParser.type_from_properties(
properties, _mappings, {"description": "description"} name, items, **item_properties
) )
if "default" in properties: wrapper_type = set if properties.get("uniqueItems", False) else list
default_list = properties["default"] field_type = wrapper_type[_item_type]
if not isinstance(default_list, list):
raise ValueError(
f"Default value must be a list, got {type(default_list).__name__}"
)
if len(default_list) > properties.get("maxItems", float("inf")): mapped_properties = self.mappings_properties_builder(properties, **kwargs)
raise ValueError(
f"Default list exceeds maxItems limit of {properties.get('maxItems')}"
)
if len(default_list) < properties.get("minItems", 0): if (
raise ValueError( default_value := mapped_properties.pop("default", None)
f"Default list is below minItems limit of {properties.get('minItems')}" ) is not None or not kwargs.get("required", False):
) mapped_properties["default_factory"] = self._build_default_factory(
default_value, wrapper_type
)
if not all(isinstance(item, _item_type) for item in default_list): if (example_values := mapped_properties.pop("examples", None)) is not None:
raise ValueError( mapped_properties["examples"] = [
f"All items in the default list must be of type {_item_type.__name__}" wrapper_type(example) for example in example_values
) ]
if wrapper_type is list: return field_type, mapped_properties
mapped_properties["default_factory"] = lambda: copy.deepcopy(
wrapper_type(default_list)
)
else:
mapped_properties["default_factory"] = lambda: wrapper_type(
default_list
)
return wrapper_type[_item_type], mapped_properties def _build_default_factory(self, default_list, wrapper_type):
if default_list is None:
return lambda: None
if not isinstance(default_list, Iterable):
raise InvalidSchemaException(
f"Default value for array must be an iterable, got {type(default_list)}",
invalid_field="default",
)
return lambda: copy.deepcopy(wrapper_type(default_list))

View File

@@ -1,17 +1,29 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser._type_parser import GenericTypeParser from jambo.parser._type_parser import GenericTypeParser
from jambo.utils.properties_builder.mappings_properties_builder import ( from jambo.types.type_parser_options import TypeParserOptions
mappings_properties_builder,
) from typing_extensions import Unpack
class BooleanTypeParser(GenericTypeParser): class BooleanTypeParser(GenericTypeParser):
mapped_type = bool mapped_type = bool
json_schema_type = "boolean" json_schema_type = "type:boolean"
@staticmethod type_mappings = {
def from_properties(name, properties): "default": "default",
_mappings = { }
"default": "default",
} def from_properties_impl(
return bool, mappings_properties_builder(properties, _mappings) self, name, properties, **kwargs: Unpack[TypeParserOptions]
):
mapped_properties = self.mappings_properties_builder(properties, **kwargs)
default_value = properties.get("default")
if default_value is not None and not isinstance(default_value, bool):
raise InvalidSchemaException(
f"Default value for {name} must be a boolean.",
invalid_field="default",
)
return bool, mapped_properties

View File

@@ -0,0 +1,57 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser._type_parser import GenericTypeParser
from jambo.types.json_schema_type import JSONSchemaNativeTypes
from jambo.types.type_parser_options import TypeParserOptions
from pydantic import AfterValidator
from typing_extensions import Annotated, Any, Literal, Unpack
class ConstTypeParser(GenericTypeParser):
    """Parses a JSON Schema ``const`` into a single-value type.

    Hashable constants become ``Literal`` types; unhashable ones fall back
    to a runtime equality validator. The constant doubles as the default.
    """

    json_schema_type = "const"

    # "const" maps onto the pydantic default so the constant is pre-filled.
    default_mappings = {
        "const": "default",
        "description": "description",
        "examples": "examples",
    }

    def from_properties_impl(
        self, name, properties, **kwargs: Unpack[TypeParserOptions]
    ):
        """Build the constant type and its field properties.

        :raises InvalidSchemaException: if ``const`` is absent or its value
            is not one of the JSON-native types.
        """
        if "const" not in properties:
            raise InvalidSchemaException(
                f"Const type {name} must have 'const' property defined.",
                invalid_field="const",
            )
        const_value = properties["const"]
        if not isinstance(const_value, JSONSchemaNativeTypes):
            raise InvalidSchemaException(
                f"Const type {name} must have 'const' value of allowed types: {JSONSchemaNativeTypes}.",
                invalid_field="const",
            )
        parsed_properties = self.mappings_properties_builder(properties, **kwargs)
        return self._build_const_type(const_value), parsed_properties

    def _build_const_type(self, const_value):
        """Return a type accepting only *const_value*.

        ``Literal`` is preferred because it supports discriminated unions,
        but it requires a hashable value; probe with ``hash()`` and fall
        back to an equality validator for unhashable constants.
        """
        try:
            hash(const_value)
        except TypeError:
            # Non-hashable (e.g. list/dict): validate equality at runtime.
            def _ensure_const(candidate: Any) -> Any:
                if candidate != const_value:
                    raise ValueError(
                        f"Value must be equal to the constant value: {const_value}"
                    )
                return candidate

            return Annotated[type(const_value), AfterValidator(_ensure_const)]
        return Literal[const_value]

View File

@@ -0,0 +1,49 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser._type_parser import GenericTypeParser
from jambo.types.json_schema_type import JSONSchemaNativeTypes
from jambo.types.type_parser_options import JSONSchema, TypeParserOptions
from typing_extensions import Unpack
from enum import Enum
class EnumTypeParser(GenericTypeParser):
    """Parses a JSON Schema ``enum`` into a dynamically created Enum type."""

    json_schema_type = "enum"

    def from_properties_impl(
        self, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
    ):
        """Build an Enum type from the ``enum`` value list.

        :raises InvalidSchemaException: if ``enum`` is missing, not a list,
            or contains values outside the JSON-native types.
        """
        if "enum" not in properties:
            raise InvalidSchemaException(
                f"Enum type {name} must have 'enum' property defined.",
                invalid_field="enum",
            )
        enum_values = properties["enum"]
        if not isinstance(enum_values, list):
            raise InvalidSchemaException(
                f"Enum type {name} must have 'enum' as a list of values.",
                invalid_field="enum",
            )
        if not all(isinstance(value, JSONSchemaNativeTypes) for value in enum_values):
            raise InvalidSchemaException(
                f"Enum type {name} must have 'enum' values of allowed types: {JSONSchemaNativeTypes}.",
                invalid_field="enum",
            )

        # Create a new Enum type dynamically; member names are the
        # upper-cased string form of each value.
        members = {str(value).upper(): value for value in enum_values}
        enum_type = Enum(name, members)  # type: ignore

        parsed_properties = self.mappings_properties_builder(properties, **kwargs)
        # Coerce the default and examples into enum members so pydantic
        # sees the enum type, not the raw schema values.
        default = parsed_properties.get("default")
        if "default" in parsed_properties and default is not None:
            parsed_properties["default"] = enum_type(default)
        if "examples" in parsed_properties:
            parsed_properties["examples"] = [
                enum_type(example) for example in parsed_properties["examples"]
            ]
        return enum_type, parsed_properties

View File

@@ -1,12 +1,24 @@
from jambo.parser._type_parser import GenericTypeParser from jambo.parser._type_parser import GenericTypeParser
from jambo.utils.properties_builder.numeric_properties_builder import numeric_properties_builder from jambo.types.type_parser_options import TypeParserOptions
from typing_extensions import Unpack
class FloatTypeParser(GenericTypeParser): class FloatTypeParser(GenericTypeParser):
mapped_type = float mapped_type = float
json_schema_type = "number" json_schema_type = "type:number"
@staticmethod type_mappings = {
def from_properties(name, properties): "minimum": "ge",
return float, numeric_properties_builder(properties) "exclusiveMinimum": "gt",
"maximum": "le",
"exclusiveMaximum": "lt",
"multipleOf": "multiple_of",
"default": "default",
}
def from_properties_impl(
self, name, properties, **kwargs: Unpack[TypeParserOptions]
):
return float, self.mappings_properties_builder(properties, **kwargs)

View File

@@ -1,12 +1,24 @@
from jambo.parser._type_parser import GenericTypeParser from jambo.parser._type_parser import GenericTypeParser
from jambo.utils.properties_builder.numeric_properties_builder import numeric_properties_builder from jambo.types.type_parser_options import TypeParserOptions
from typing_extensions import Unpack
class IntTypeParser(GenericTypeParser): class IntTypeParser(GenericTypeParser):
mapped_type = int mapped_type = int
json_schema_type = "integer" json_schema_type = "type:integer"
@staticmethod type_mappings = {
def from_properties(name, properties): "minimum": "ge",
return int, numeric_properties_builder(properties) "exclusiveMinimum": "gt",
"maximum": "le",
"exclusiveMaximum": "lt",
"multipleOf": "multiple_of",
"default": "default",
}
def from_properties_impl(
self, name, properties, **kwargs: Unpack[TypeParserOptions]
):
return int, self.mappings_properties_builder(properties, **kwargs)

View File

@@ -0,0 +1,18 @@
from jambo.parser._type_parser import GenericTypeParser
from jambo.types.type_parser_options import TypeParserOptions
from typing_extensions import Unpack
class NullTypeParser(GenericTypeParser):
    """Parses the JSON Schema ``null`` type (maps to ``NoneType``)."""

    mapped_type = type(None)
    json_schema_type = "type:null"

    def from_properties_impl(
        self, name, properties, **kwargs: Unpack[TypeParserOptions]
    ):
        """Return ``NoneType`` with its field properties.

        A null field can only ever hold None, so the default is forced.
        """
        parsed = self.mappings_properties_builder(properties, **kwargs)
        parsed["default"] = None
        return self.mapped_type, parsed

View File

@@ -1,19 +1,104 @@
from jambo.exceptions import InternalAssertionException
from jambo.parser._type_parser import GenericTypeParser from jambo.parser._type_parser import GenericTypeParser
from jambo.types.json_schema_type import JSONSchema
from jambo.types.type_parser_options import TypeParserOptions
from pydantic import BaseModel, ConfigDict, Field, create_model
from pydantic.fields import FieldInfo
from typing_extensions import Unpack
import warnings
class ObjectTypeParser(GenericTypeParser): class ObjectTypeParser(GenericTypeParser):
mapped_type = object mapped_type = object
json_schema_type = "object" json_schema_type = "type:object"
@staticmethod def from_properties_impl(
def from_properties(name, properties): self, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
from jambo.schema_converter import SchemaConverter ) -> tuple[type[BaseModel], dict]:
type_parsing = self.to_model(
if "default" in properties: name,
raise RuntimeError("Default values for objects are not supported.") properties.get("properties", {}),
properties.get("required", []),
return ( **kwargs,
SchemaConverter.build_object(name, properties),
{}, # The second argument is not used in this case
) )
type_properties = self.mappings_properties_builder(properties, **kwargs)
if (
default_value := type_properties.pop("default", None)
) is not None or not kwargs.get("required", False):
type_properties["default_factory"] = (
lambda: type_parsing.model_validate(default_value)
if default_value is not None
else None
)
if (example_values := type_properties.pop("examples", None)) is not None:
type_properties["examples"] = [
type_parsing.model_validate(example) for example in example_values
]
return type_parsing, type_properties
@classmethod
def to_model(
    cls,
    name: str,
    properties: dict[str, JSONSchema],
    required_keys: list[str],
    **kwargs: Unpack[TypeParserOptions],
) -> type[BaseModel]:
    """
    Converts JSON Schema object properties to a Pydantic model.

    :param name: The name of the model.
    :param properties: The properties of the JSON Schema object.
    :param required_keys: List of required keys in the schema.
    :raises InternalAssertionException: if ``ref_cache`` is absent from kwargs.
    :return: A Pydantic model class.
    """
    ref_cache = kwargs.get("ref_cache")
    if ref_cache is None:
        raise InternalAssertionException(
            "`ref_cache` must be provided in kwargs for ObjectTypeParser"
        )

    # Reuse a fully built model when this name is already cached; placeholder
    # entries (None / ForwardRef) fail the isinstance check and are rebuilt.
    if (model := ref_cache.get(name)) is not None and isinstance(model, type):
        # Fixed typo in the user-facing warning: "namming" -> "naming".
        warnings.warn(
            f"Type '{name}' is already in the ref_cache and therefore cached value will be used."
            " This may indicate a naming collision in the schema or just a normal optimization,"
            " if this behavior is desired pass a clean ref_cache or use the param `without_cache`"
        )
        return model

    # validate_assignment keeps constraints enforced on attribute mutation too.
    model_config = ConfigDict(validate_assignment=True)
    fields = cls._parse_properties(name, properties, required_keys, **kwargs)

    model = create_model(name, __config__=model_config, **fields)  # type: ignore
    # Publish the finished model so later $ref lookups resolve to it.
    ref_cache[name] = model

    return model
@classmethod
def _parse_properties(
    cls,
    name: str,
    properties: dict[str, JSONSchema],
    required_keys: list[str],
    **kwargs: Unpack[TypeParserOptions],
) -> dict[str, tuple[type, FieldInfo]]:
    """Build the pydantic field definitions for an object's properties.

    :param name: Name of the enclosing model; used to derive child names.
    :param properties: Mapping of property name -> property schema.
    :param required_keys: Property names listed under the schema's "required".
    :return: Mapping of property name -> (type, Field) ready for create_model.
    """
    required = set(required_keys or [])

    parsed_fields: dict[str, tuple[type, FieldInfo]] = {}
    for prop_name, prop_schema in properties.items():
        # Each child gets its own option set so mutating "required" here
        # cannot leak into sibling properties.
        child_options: TypeParserOptions = kwargs.copy()
        child_options["required"] = prop_name in required

        child_type, child_props = GenericTypeParser.type_from_properties(
            f"{name}.{prop_name}",
            prop_schema,
            **child_options,  # type: ignore
        )
        parsed_fields[prop_name] = (child_type, Field(**child_props))

    return parsed_fields

View File

@@ -0,0 +1,114 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser._type_parser import GenericTypeParser
from jambo.types.type_parser_options import TypeParserOptions
from pydantic import BaseModel, BeforeValidator, Field, TypeAdapter, ValidationError
from typing_extensions import Annotated, Any, Union, Unpack, get_args
Annotation = Annotated[Any, ...]
class OneOfTypeParser(GenericTypeParser):
    """Parser for the JSON Schema ``oneOf`` keyword.

    Produces a Union of the sub-schema types, enforcing that a value matches
    exactly one alternative — either via an OpenAPI-style discriminator or
    via an explicit validation function.
    """

    mapped_type = Union
    json_schema_type = "oneOf"

    def from_properties_impl(
        self, name, properties, **kwargs: Unpack[TypeParserOptions]
    ):
        """Resolve a ``oneOf`` schema fragment into an annotated Union type.

        :raises InvalidSchemaException: if ``oneOf`` is missing or not a
            non-empty list, or the discriminator is malformed.
        """
        if "oneOf" not in properties:
            raise InvalidSchemaException(
                f"Invalid JSON Schema: {properties}", invalid_field="oneOf"
            )

        one_of_schemas = properties["oneOf"]
        if not isinstance(one_of_schemas, list) or len(one_of_schemas) == 0:
            raise InvalidSchemaException(
                f"Invalid JSON Schema: {one_of_schemas}", invalid_field="oneOf"
            )

        mapped_properties = self.mappings_properties_builder(properties, **kwargs)

        # Parse every alternative; the synthetic "_subN" names keep nested
        # object models distinguishable.
        sub_types = []
        for index, sub_schema in enumerate(one_of_schemas):
            sub_types.append(
                GenericTypeParser.type_from_properties(
                    f"{name}_sub{index}", sub_schema, **kwargs
                )
            )

        if not kwargs.get("required", False):
            # Optional fields default to whatever the schema declared (or None).
            mapped_properties["default"] = mapped_properties.get("default")

        subfield_types = []
        for sub_type, sub_props in sub_types:
            subfield_types.append(Annotated[sub_type, Field(**sub_props)])

        # Added with the understanding of discriminator are not in the JsonSchema Spec,
        # they were added by OpenAPI and not all implementations may support them,
        # and they do not always generate a model one-to-one to the Pydantic model
        # TL;DR: Discriminators were added by OpenAPI and not a Official JSON Schema feature
        discriminator = properties.get("discriminator")
        if discriminator is not None:
            validated_type = self._build_type_one_of_with_discriminator(
                subfield_types, discriminator
            )
        else:
            validated_type = self._build_type_one_of_with_func(subfield_types)

        return validated_type, mapped_properties

    @staticmethod
    def _build_type_one_of_with_discriminator(
        subfield_types: list[Annotation], discriminator_prop: dict
    ) -> Annotation:
        """
        Build a type with a discriminator.
        """
        if not isinstance(discriminator_prop, dict):
            raise InvalidSchemaException(
                "Discriminator must be a dictionary", invalid_field="discriminator"
            )

        # Discriminated unions only make sense over model (object) alternatives.
        for annotated_field in subfield_types:
            inner_type, _ = get_args(annotated_field)
            if not issubclass(inner_type, BaseModel):
                raise InvalidSchemaException(
                    "When using a discriminator, all subfield types must be of type 'object'.",
                    invalid_field="discriminator",
                )

        property_name = discriminator_prop.get("propertyName")
        if not isinstance(property_name, str):
            raise InvalidSchemaException(
                "Discriminator must have a 'propertyName' key",
                invalid_field="propertyName",
            )

        return Annotated[Union[(*subfield_types,)], Field(discriminator=property_name)]

    @staticmethod
    def _build_type_one_of_with_func(subfield_types: list[Annotation]) -> Annotation:
        """
        Build a type with a validation function for the oneOf constraint.
        """

        def validate_one_of(value: Any) -> Any:
            # Count how many alternatives accept the value; exactly one must.
            match_total = 0
            for candidate in subfield_types:
                try:
                    TypeAdapter(candidate).validate_python(value)
                except ValidationError:
                    continue
                match_total += 1

            if match_total == 0:
                raise ValueError("Value does not match any of the oneOf schemas")
            if match_total > 1:
                raise ValueError(
                    "Value matches multiple oneOf schemas, exactly one expected"
                )
            return value

        return Annotated[Union[(*subfield_types,)], BeforeValidator(validate_one_of)]

View File

@@ -0,0 +1,135 @@
from jambo.exceptions import InternalAssertionException, InvalidSchemaException
from jambo.parser import GenericTypeParser
from jambo.types import RefCacheDict
from jambo.types.json_schema_type import JSONSchema
from jambo.types.type_parser_options import TypeParserOptions
from typing_extensions import ForwardRef, Literal, Union, Unpack
RefType = Union[type, ForwardRef]
RefStrategy = Literal["forward_ref", "def_ref"]
class RefTypeParser(GenericTypeParser):
    """Resolves JSON Schema ``$ref`` entries into Python types.

    Two reference styles are supported:

    * ``"#"`` (root reference) — resolved lazily as a ``ForwardRef`` to the
      schema's top-level ``title``.
    * ``"#/$defs/<name>"`` — resolved eagerly by parsing the referenced
      definition out of the schema ``context``.

    A shared ``ref_cache`` tracks reference state: a missing key means
    "never seen", ``None`` means "currently being built" (cycle guard), and
    a type/ForwardRef means "resolved".
    """

    json_schema_type = "$ref"

    def from_properties_impl(
        self, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
    ) -> tuple[RefType, dict]:
        """Resolve ``properties["$ref"]`` into a type plus field properties.

        :param name: Name of the field holding the reference.
        :param properties: Schema fragment containing the ``$ref`` key.
        :raises InvalidSchemaException: if ``$ref`` is missing or unsupported.
        :raises InternalAssertionException: if ``context`` or ``ref_cache``
            is absent from kwargs (internal invariant, not user error).
        :return: The resolved type (possibly a ``ForwardRef``) and its
            mapped field properties.
        """
        if "$ref" not in properties:
            raise InvalidSchemaException(
                f"Missing $ref in properties for {name}", invalid_field="$ref"
            )

        if kwargs.get("context") is None:
            raise InternalAssertionException(
                "`context` must be provided in kwargs for RefTypeParser"
            )

        ref_cache = kwargs.get("ref_cache")
        if ref_cache is None:
            raise InternalAssertionException(
                "`ref_cache` must be provided in kwargs for RefTypeParser"
            )

        mapped_properties = self.mappings_properties_builder(properties, **kwargs)

        ref_strategy, ref_name, ref_property = self._examine_ref_strategy(
            name, properties, **kwargs
        )

        # Note: this lookup also marks a never-seen ref as "in progress"
        # (side effect of _get_ref_from_cache) before we start parsing it.
        ref_state = self._get_ref_from_cache(ref_name, ref_cache)
        if ref_state is not None:
            # If the reference is either processing or already cached
            return ref_state, mapped_properties

        ref = self._parse_from_strategy(ref_strategy, ref_name, ref_property, **kwargs)
        ref_cache[ref_name] = ref

        return ref, mapped_properties

    def _parse_from_strategy(
        self,
        ref_strategy: RefStrategy,
        ref_name: str,
        ref_property: JSONSchema,
        **kwargs: Unpack[TypeParserOptions],
    ) -> RefType:
        """Materialize a reference according to its resolution strategy.

        ``forward_ref`` produces a lazy ``ForwardRef``; ``def_ref`` parses
        the referenced ``$defs`` schema immediately.
        """
        mapped_type: RefType
        match ref_strategy:
            case "forward_ref":
                mapped_type = ForwardRef(ref_name)
            case "def_ref":
                mapped_type, _ = GenericTypeParser.type_from_properties(
                    ref_name, ref_property, **kwargs
                )
            case _:
                # Defensive: _examine_ref_strategy only emits the two cases above.
                raise InvalidSchemaException(
                    f"Unsupported $ref {ref_property['$ref']}", invalid_field="$ref"
                )
        return mapped_type

    def _get_ref_from_cache(
        self, ref_name: str, ref_cache: RefCacheDict
    ) -> RefType | type | None:
        """Look up ``ref_name`` in the cache, claiming it when unseen.

        :return: a ``ForwardRef`` when the ref is mid-construction (cycle),
            the cached type when resolved, or ``None`` when the ref was not
            in the cache — in which case it is marked as in-progress as a
            side effect.
        """
        try:
            ref_state = ref_cache[ref_name]
            if ref_state is None:
                # If the reference is being processed, we return a ForwardRef
                return ForwardRef(ref_name)
            # If the reference is already cached, we return it
            return ref_state
        except KeyError:
            # If the reference is not in the cache, we will set it to None
            ref_cache[ref_name] = None
            return None

    def _examine_ref_strategy(
        self, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
    ) -> tuple[RefStrategy, str, JSONSchema]:
        """Classify the reference and return (strategy, target name, target schema).

        Root references ("#") carry no target schema — they resolve to the
        context's ``title`` via ForwardRef; ``#/$defs/...`` references are
        walked through the context to find the concrete definition.

        :raises InvalidSchemaException: for any other ``$ref`` form.
        """
        if properties.get("$ref") == "#":
            ref_name = kwargs["context"].get("title")
            if ref_name is None:
                raise InvalidSchemaException(
                    "Missing title in properties for $ref of Root Reference",
                    invalid_field="title",
                )
            return "forward_ref", ref_name, {}

        if properties.get("$ref", "").startswith("#/$defs/"):
            target_name, target_property = self._extract_target_ref(
                name, properties, **kwargs
            )
            return "def_ref", target_name, target_property

        raise InvalidSchemaException(
            "Only Root and $defs references are supported at the moment",
            invalid_field="$ref",
        )

    def _extract_target_ref(
        self, name: str, properties: JSONSchema, **kwargs: Unpack[TypeParserOptions]
    ) -> tuple[str, JSONSchema]:
        """Walk a ``#/$defs/...`` pointer through the context schema.

        Each path segment after "#" must exist at the corresponding level of
        the context; the last segment becomes the reference's name.

        :raises InvalidSchemaException: if any segment is missing or the
            final target is unusable.
        """
        target_name = None
        target_property = kwargs["context"]
        for prop_name in properties["$ref"].split("/")[1:]:
            if prop_name not in target_property:
                raise InvalidSchemaException(
                    f"Missing {prop_name} in properties for $ref {properties['$ref']}",
                    invalid_field=prop_name,
                )

            target_name = prop_name
            target_property = target_property[prop_name]  # type: ignore

        if not isinstance(target_name, str) or target_property is None:
            raise InvalidSchemaException(
                f"Invalid $ref {properties['$ref']}", invalid_field="$ref"
            )

        return target_name, target_property

View File

@@ -1,40 +1,98 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser._type_parser import GenericTypeParser from jambo.parser._type_parser import GenericTypeParser
from jambo.utils.properties_builder.mappings_properties_builder import ( from jambo.types.type_parser_options import TypeParserOptions
mappings_properties_builder,
) from pydantic import AnyUrl, EmailStr
from typing_extensions import Any, Unpack
from datetime import date, datetime, time, timedelta
from ipaddress import IPv4Address, IPv6Address
from uuid import UUID
class StringTypeParser(GenericTypeParser): class StringTypeParser(GenericTypeParser):
mapped_type = str mapped_type = str
json_schema_type = "string" json_schema_type = "type:string"
@staticmethod type_mappings = {
def from_properties(name, properties): "maxLength": "max_length",
_mappings = { "minLength": "min_length",
"maxLength": "max_length", "pattern": "pattern",
"minLength": "min_length", }
"pattern": "pattern",
}
mapped_properties = mappings_properties_builder(properties, _mappings) format_type_mapping = {
# [7.3.1](https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.7.3.1). Dates, Times, and Duration
"date": date,
"time": time,
"date-time": datetime,
"duration": timedelta,
# [7.3.2](https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.7.3.2). Email Addresses
"email": EmailStr,
# [7.3.3](https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.7.3.3). Hostnames
"hostname": str,
# [7.3.4](https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.7.3.4). IP Addresses
"ipv4": IPv4Address,
"ipv6": IPv6Address,
# [7.3.5](https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-00#rfc.section.7.3.5). Resource Identifiers
"uri": AnyUrl,
# "iri" # Not supported by pydantic and currently not supported by jambo
"uuid": UUID,
}
if "default" in properties: format_pattern_mapping = {
default_value = properties["default"] "hostname": r"^[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?)*$",
if not isinstance(default_value, str): }
raise ValueError(
f"Default value for {name} must be a string, "
f"but got {type(properties['default'])}."
)
if len(default_value) > properties.get("maxLength", float("inf")): def from_properties_impl(
raise ValueError( self, name, properties, **kwargs: Unpack[TypeParserOptions]
f"Default value for {name} exceeds maxLength limit of {properties.get('maxLength')}" ):
) mapped_properties = self.mappings_properties_builder(properties, **kwargs)
if len(default_value) < properties.get("minLength", 0): format_type = properties.get("format")
raise ValueError( if not format_type:
f"Default value for {name} is below minLength limit of {properties.get('minLength')}" return str, mapped_properties
)
return str, mapped_properties if format_type not in self.format_type_mapping:
raise InvalidSchemaException(
f"Unsupported string format: {format_type}", invalid_field="format"
)
mapped_type = self.format_type_mapping[format_type]
if format_type in self.format_pattern_mapping:
mapped_properties["pattern"] = self.format_pattern_mapping[format_type]
if "examples" in mapped_properties:
mapped_properties["examples"] = [
self._parse_example(example, format_type, mapped_type)
for example in mapped_properties["examples"]
]
if "json_schema_extra" not in mapped_properties:
mapped_properties["json_schema_extra"] = {}
mapped_properties["json_schema_extra"]["format"] = format_type
return mapped_type, mapped_properties
def _parse_example(
self, example: Any, format_type: str, mapped_type: type[Any]
) -> Any:
"""
Parse example from JSON Schema format to python format
:param example: Example Value
:param format_type: Format Type
:param mapped_type: Type to parse
:return: Example parsed
"""
match format_type:
case "date" | "time" | "date-time":
return mapped_type.fromisoformat(example)
case "duration":
# TODO: Implement duration parser
raise NotImplementedError
case "ipv4" | "ipv6":
return mapped_type(example)
case "uuid":
return mapped_type(example)
case _:
return example

View File

@@ -1,13 +1,11 @@
from jambo.parser import GenericTypeParser from jambo.exceptions import InvalidSchemaException, UnsupportedSchemaException
from jambo.parser import ObjectTypeParser, RefTypeParser
from jambo.types import JSONSchema, RefCacheDict
from jsonschema.exceptions import SchemaError from jsonschema.exceptions import SchemaError
from jsonschema.protocols import Validator from jsonschema.validators import validator_for
from pydantic import create_model from pydantic import BaseModel
from pydantic.fields import Field from typing_extensions import Optional
from typing import Type
from jambo.types.json_schema_type import JSONSchema
class SchemaConverter: class SchemaConverter:
@@ -19,81 +17,142 @@ class SchemaConverter:
fields and types. The generated model can be used for data validation and serialization. fields and types. The generated model can be used for data validation and serialization.
""" """
@staticmethod def __init__(
def build(schema: JSONSchema) -> Type: self, namespace_registry: Optional[dict[str, RefCacheDict]] = None
) -> None:
if namespace_registry is None:
namespace_registry = dict()
self._namespace_registry = namespace_registry
def build_with_cache(
self,
schema: JSONSchema,
ref_cache: Optional[RefCacheDict] = None,
without_cache: bool = False,
) -> type[BaseModel]:
""" """
Converts a JSON Schema to a Pydantic model. Converts a JSON Schema to a Pydantic model.
:param schema: The JSON Schema to convert. This is the instance method version of `build` and uses the instance's reference cache if none is provided.
:return: A Pydantic model class. Use this method if you want to utilize the instance's reference cache.
"""
if "title" not in schema:
raise ValueError("JSON Schema must have a title.")
return SchemaConverter.build_object(schema["title"], schema) :param schema: The JSON Schema to convert.
:param ref_cache: An optional reference cache to use during conversion.
:param without_cache: Whether to use a clean reference cache for this conversion.
:return: The generated Pydantic model.
"""
local_ref_cache: RefCacheDict
if without_cache:
local_ref_cache = dict()
elif ref_cache is None:
namespace = schema.get("$id", "default")
local_ref_cache = self._namespace_registry.setdefault(namespace, dict())
else:
local_ref_cache = ref_cache
return self.build(schema, local_ref_cache)
@staticmethod @staticmethod
def build_object( def build(
name: str, schema: JSONSchema, ref_cache: Optional[RefCacheDict] = None
schema: JSONSchema, ) -> type[BaseModel]:
) -> Type:
""" """
Converts a JSON Schema object to a Pydantic model given a name. Converts a JSON Schema to a Pydantic model.
:param name: This method doesn't use a reference cache if none is provided.
:param schema: :param schema: The JSON Schema to convert.
:return: :param ref_cache: An optional reference cache to use during conversion, if provided `with_clean_cache` will be ignored.
:return: The generated Pydantic model.
""" """
if ref_cache is None:
ref_cache = dict()
try: try:
Validator.check_schema(schema) validator = validator_for(schema)
except SchemaError as e: validator.check_schema(schema) # type: ignore
raise ValueError(f"Invalid JSON Schema: {e}") except SchemaError as err:
raise InvalidSchemaException(
"Validation of JSON Schema failed.", cause=err
) from err
if schema["type"] != "object": if "title" not in schema:
raise TypeError( raise InvalidSchemaException(
f"Invalid JSON Schema: {schema['type']}. Only 'object' can be converted to Pydantic models." "Schema must have a title.", invalid_field="title"
) )
return SchemaConverter._build_model_from_properties( schema_type = SchemaConverter._get_schema_type(schema)
name, schema["properties"], schema.get("required", [])
) match schema_type:
case "object":
return ObjectTypeParser.to_model(
schema["title"],
schema.get("properties", {}),
schema.get("required", []),
context=schema,
ref_cache=ref_cache,
required=True,
)
case "$ref":
parsed_model, _ = RefTypeParser().from_properties(
schema["title"],
schema,
context=schema,
ref_cache=ref_cache,
required=True,
)
return parsed_model
case _:
unsupported_type = (
f"type:{schema_type}" if schema_type else "missing type"
)
raise UnsupportedSchemaException(
"Only object and $ref schema types are supported.",
unsupported_field=unsupported_type,
)
def clear_ref_cache(self, namespace: Optional[str] = "default") -> None:
"""
Clears the reference cache.
"""
if namespace is None:
self._namespace_registry.clear()
return
if namespace in self._namespace_registry:
self._namespace_registry[namespace].clear()
def get_cached_ref(
self, ref_name: str, namespace: str = "default"
) -> Optional[type]:
"""
Gets a cached reference from the reference cache.
:param ref_name: The name of the reference to get.
:return: The cached reference, or None if not found.
"""
cached_type = self._namespace_registry.get(
namespace, {}
).get(ref_name)
if isinstance(cached_type, type):
return cached_type
return None
@staticmethod @staticmethod
def _build_model_from_properties( def _get_schema_type(schema: JSONSchema) -> str | None:
model_name: str, model_properties: dict, required_keys: list[str] """
) -> Type: Returns the type of the schema.
properties = SchemaConverter._parse_properties(model_properties, required_keys) :param schema: The JSON Schema to check.
:return: The type of the schema.
"""
if "$ref" in schema:
return "$ref"
return create_model(model_name, **properties) type_value = schema.get("type")
if isinstance(type_value, list):
raise InvalidSchemaException(
"Invalid schema: 'type' cannot be a list at the top level",
invalid_field=str(schema),
)
@staticmethod return type_value
def _parse_properties(
properties: dict, required_keys=None
) -> dict[str, tuple[type, Field]]:
required_keys = required_keys or []
fields = {}
for name, prop in properties.items():
fields[name] = SchemaConverter._build_field(name, prop, required_keys)
return fields
@staticmethod
def _build_field(
name, properties: dict, required_keys: list[str]
) -> tuple[type, dict]:
_field_type, _field_args = GenericTypeParser.get_impl(
properties["type"]
).from_properties(name, properties)
_field_args = _field_args or {}
if description := properties.get("description"):
_field_args["description"] = description
if name not in required_keys:
_field_args["default"] = properties.get("default", None)
if "default_factory" in _field_args and "default" in _field_args:
del _field_args["default"]
return _field_type, Field(**_field_args)

View File

@@ -0,0 +1,17 @@
from .json_schema_type import (
JSONSchema,
JSONSchemaNativeTypes,
JSONSchemaType,
JSONType,
)
from .type_parser_options import RefCacheDict, TypeParserOptions
__all__ = [
"JSONSchemaType",
"JSONSchemaNativeTypes",
"JSONType",
"JSONSchema",
"RefCacheDict",
"TypeParserOptions",
]

View File

@@ -1,80 +1,80 @@
from typing import List, Dict, Union, TypedDict, Literal from __future__ import annotations
from typing_extensions import (
Dict,
List,
Literal,
TypedDict,
Union,
)
from types import NoneType
# Primitive JSON types
JSONSchemaType = Literal[ JSONSchemaType = Literal[
"string", "number", "integer", "boolean", "object", "array", "null" "string", "number", "integer", "boolean", "object", "array", "null"
] ]
JSONSchemaNativeTypes: tuple[type, ...] = (
str,
float,
int,
bool,
list,
set,
NoneType,
)
JSONType = Union[str, int, float, bool, None, Dict[str, "JSONType"], List["JSONType"]] JSONType = Union[str, int, float, bool, None, Dict[str, "JSONType"], List["JSONType"]]
# Dynamically define TypedDict with JSON Schema keywords
class JSONSchema(TypedDict, total=False): JSONSchema = TypedDict(
# Basic metadata "JSONSchema",
title: str {
description: str "$id": str,
default: JSONType "$schema": str,
examples: List[JSONType] "$ref": str,
"$anchor": str,
# Type definitions "$comment": str,
type: Union[JSONSchemaType, List[JSONSchemaType]] "$defs": Dict[str, "JSONSchema"],
"title": str,
# Object-specific keywords "description": str,
properties: Dict[str, "JSONSchema"] "default": JSONType,
required: List[str] "examples": List[JSONType],
additionalProperties: Union[bool, "JSONSchema"] "type": JSONSchemaType | List[JSONSchemaType],
minProperties: int "enum": List[JSONType],
maxProperties: int "const": JSONType,
patternProperties: Dict[str, "JSONSchema"] "properties": Dict[str, "JSONSchema"],
dependencies: Dict[str, Union[List[str], "JSONSchema"]] "patternProperties": Dict[str, "JSONSchema"],
"additionalProperties": Union[bool, "JSONSchema"],
# Array-specific keywords "required": List[str],
items: Union["JSONSchema", List["JSONSchema"]] "minProperties": int,
additionalItems: Union[bool, "JSONSchema"] "maxProperties": int,
minItems: int "dependencies": Dict[str, Union[List[str], "JSONSchema"]],
maxItems: int "items": "JSONSchema",
uniqueItems: bool "prefixItems": List["JSONSchema"],
"additionalItems": Union[bool, "JSONSchema"],
# String-specific keywords "contains": "JSONSchema",
minLength: int "minItems": int,
maxLength: int "maxItems": int,
pattern: str "uniqueItems": bool,
format: str "minLength": int,
"maxLength": int,
# Number-specific keywords "pattern": str,
minimum: float "format": str,
maximum: float "minimum": float,
exclusiveMinimum: float "maximum": float,
exclusiveMaximum: float "exclusiveMinimum": Union[bool, float],
multipleOf: float "exclusiveMaximum": Union[bool, float],
"multipleOf": float,
# Enum and const "if": "JSONSchema",
enum: List[JSONType] "then": "JSONSchema",
const: JSONType "else": "JSONSchema",
"allOf": List["JSONSchema"],
# Conditionals "anyOf": List["JSONSchema"],
if_: "JSONSchema" # 'if' is a reserved word in Python "oneOf": List["JSONSchema"],
then: "JSONSchema" "not": "JSONSchema",
else_: "JSONSchema" # 'else' is also a reserved word },
total=False, # all fields optional
# Combination keywords )
allOf: List["JSONSchema"]
anyOf: List["JSONSchema"]
oneOf: List["JSONSchema"]
not_: "JSONSchema" # 'not' is a reserved word
# Fix forward references
JSONSchema.__annotations__["properties"] = Dict[str, JSONSchema]
JSONSchema.__annotations__["items"] = Union[JSONSchema, List[JSONSchema]]
JSONSchema.__annotations__["additionalItems"] = Union[bool, JSONSchema]
JSONSchema.__annotations__["additionalProperties"] = Union[bool, JSONSchema]
JSONSchema.__annotations__["patternProperties"] = Dict[str, JSONSchema]
JSONSchema.__annotations__["dependencies"] = Dict[str, Union[List[str], JSONSchema]]
JSONSchema.__annotations__["if_"] = JSONSchema
JSONSchema.__annotations__["then"] = JSONSchema
JSONSchema.__annotations__["else_"] = JSONSchema
JSONSchema.__annotations__["allOf"] = List[JSONSchema]
JSONSchema.__annotations__["anyOf"] = List[JSONSchema]
JSONSchema.__annotations__["oneOf"] = List[JSONSchema]
JSONSchema.__annotations__["not_"] = JSONSchema

View File

@@ -0,0 +1,12 @@
from jambo.types.json_schema_type import JSONSchema
from typing_extensions import ForwardRef, MutableMapping, TypedDict
RefCacheDict = MutableMapping[str, ForwardRef | type | None]
class TypeParserOptions(TypedDict, total=False):
    """Options threaded through every parser via ``**kwargs``.

    Declared ``total=False`` because callers do not always supply every
    key: parsers probe with ``kwargs.get(...)`` and raise their own
    assertion errors when a key they depend on is missing, and the test
    suite invokes parsers with only ``ref_cache`` set.
    """

    # Whether the field being parsed appears in its parent's "required" list.
    required: bool
    # The root schema, used to resolve "$ref" lookups.
    context: JSONSchema
    # Shared cache of already-built (or in-progress) referenced types.
    ref_cache: RefCacheDict

View File

@@ -1,11 +0,0 @@
def mappings_properties_builder(properties, mappings, default_mappings=None):
    """Translate JSON Schema keywords into pydantic Field keyword names.

    Keys of ``properties`` not present in the combined mapping are dropped.

    :param properties: Raw JSON Schema fragment for a field.
    :param mappings: Schema-keyword -> Field-keyword translation table;
        entries here override the defaults.
    :param default_mappings: Optional replacement for the built-in
        identity mappings of "default" and "description".
    :return: dict of translated keyword arguments.
    """
    if not default_mappings:
        default_mappings = {
            "default": "default",
            "description": "description",
        }

    combined = {**default_mappings, **mappings}

    translated = {}
    for source_key, value in properties.items():
        if source_key in combined:
            translated[combined[source_key]] = value
    return translated

View File

@@ -1,51 +0,0 @@
from jambo.utils.properties_builder.mappings_properties_builder import (
mappings_properties_builder,
)
def numeric_properties_builder(properties):
    """Map JSON Schema numeric keywords onto pydantic Field constraint names
    and validate any declared default against those constraints.

    :param properties: JSON Schema fragment for a number/integer field.
    :raises ValueError: if the default is not numeric or violates a bound.
    :return: dict of pydantic Field keyword arguments.
    """
    _mappings = {
        "minimum": "ge",
        "exclusiveMinimum": "gt",
        "maximum": "le",
        "exclusiveMaximum": "lt",
        "multipleOf": "multiple_of",
        "default": "default",
    }

    mapped_properties = mappings_properties_builder(properties, _mappings)

    if "default" in properties:
        default_value = properties["default"]
        if not isinstance(default_value, (int, float)):
            raise ValueError(
                f"Default value must be a number, got {type(default_value).__name__}"
            )

        # "maximum"/"minimum" are INCLUSIVE bounds in JSON Schema (Validation
        # spec §6.2): a default equal to the bound is valid, so only strict
        # violations may raise. (Previous code used >=/<=, wrongly rejecting
        # equality with the bound.)
        if "maximum" in properties and default_value > properties["maximum"]:
            raise ValueError(
                f"Default value exceeds maximum limit of {properties.get('maximum')}"
            )
        if "minimum" in properties and default_value < properties["minimum"]:
            raise ValueError(
                f"Default value is below minimum limit of {properties.get('minimum')}"
            )

        # "exclusiveMaximum"/"exclusiveMinimum" are STRICT bounds: equality
        # with the bound is itself a violation. (Previous code used >/<,
        # wrongly accepting equality.)
        if (
            "exclusiveMaximum" in properties
            and default_value >= properties["exclusiveMaximum"]
        ):
            raise ValueError(
                f"Default value exceeds exclusive maximum limit of {properties.get('exclusiveMaximum')}"
            )
        if (
            "exclusiveMinimum" in properties
            and default_value <= properties["exclusiveMinimum"]
        ):
            raise ValueError(
                f"Default value is below exclusive minimum limit of {properties.get('exclusiveMinimum')}"
            )

        if "multipleOf" in properties:
            # NOTE(review): float modulo is subject to rounding error for
            # non-integer multiples — acceptable for typical schemas.
            if default_value % properties["multipleOf"] != 0:
                raise ValueError(
                    f"Default value {default_value} is not a multiple of {properties['multipleOf']}"
                )

    return mapped_properties

View File

@@ -1,7 +1,7 @@
[project] [project]
name = "jambo" name = "jambo"
dynamic = ["version"] dynamic = ["version"]
description = "Add your description here" description = "Jambo - JSON Schema to Pydantic Converter"
requires-python = ">=3.10,<4.0" requires-python = ">=3.10,<4.0"
maintainers = [ maintainers = [
{ name = "Vitor Hideyoshi", email = "vitor.h.n.batista@gmail.com" }, { name = "Vitor Hideyoshi", email = "vitor.h.n.batista@gmail.com" },
@@ -18,21 +18,28 @@ classifiers = [
"Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.13",
] ]
license = { file = "LICENSE" } license = "MIT"
readme = "README.md" readme = "README.md"
# Project Dependencies # Project Dependencies
dependencies = [ dependencies = [
"email-validator>=2.2.0",
"jsonschema>=4.23.0", "jsonschema>=4.23.0",
"pydantic>=2.10.6", "pydantic>=2.12.4",
] ]
[dependency-groups] [dependency-groups]
dev = [ dev = [
"coverage>=7.8.0", "coverage>=7.8.0",
"mypy>=1.18.1",
"poethepoet>=0.33.1", "poethepoet>=0.33.1",
"pre-commit>=4.2.0", "pre-commit>=4.2.0",
"ruff>=0.11.4", "ruff>=0.11.4",
"sphinx>=8.1.3",
"sphinx-autobuild>=2024.10.3",
"sphinx-autodoc-typehints>=3.0.1",
"sphinx-rtd-theme>=3.0.2",
"types-jsonschema>=4.25.1.20250822",
] ]
@@ -44,7 +51,10 @@ repository = "https://github.com/HideyoshiNakazone/jambo.git"
# POE Tasks # POE Tasks
[tool.poe.tasks] [tool.poe.tasks]
create-hooks = "bash .githooks/set-hooks.sh" create-hooks = "bash .githooks/set-hooks.sh"
tests = "python -m unittest discover -s tests -v" tests = "python -m coverage run -m unittest discover -v"
tests-report = "python -m coverage xml"
type-check = "mypy jambo"
serve-docs = "sphinx-autobuild docs/source docs/build"
# Build System # Build System
[tool.hatch.version] [tool.hatch.version]
@@ -55,8 +65,20 @@ requires = ["hatchling", "hatch-vcs"]
build-backend = "hatchling.build" build-backend = "hatchling.build"
# Tests
[tool.coverage.run]
omit = [
"tests/*",
]
# Linters # Linters
[tool.ruff.lint]
extend-select = ["I"]
[tool.ruff.lint.isort] [tool.ruff.lint.isort]
known-first-party = ["jambo"]
section-order=[ section-order=[
"future", "future",
"first-party", "first-party",
@@ -64,3 +86,9 @@ section-order=[
"third-party", "third-party",
"standard-library", "standard-library",
] ]
lines-after-imports = 2
[tool.pyright]
venvPath = "."
venv = ".venv"

View File

@@ -0,0 +1,21 @@
from jambo.exceptions.internal_assertion_exception import InternalAssertionException
from unittest import TestCase
class TestInternalAssertionException(TestCase):
    """Unit tests for InternalAssertionException."""

    def test_inheritance(self):
        # Callers must be able to catch it as a plain RuntimeError.
        self.assertTrue(issubclass(InternalAssertionException, RuntimeError))

    def test_message(self):
        msg = "This is an internal assertion error."
        expected = (
            f"Internal Assertion Failed: {msg}\n"
            "This is likely a bug in Jambo. Please report it at"
        )

        with self.assertRaises(InternalAssertionException) as captured:
            raise InternalAssertionException(msg)

        self.assertEqual(str(captured.exception), expected)

View File

@@ -0,0 +1,44 @@
from jambo.exceptions.invalid_schema_exception import InvalidSchemaException
from unittest import TestCase
# NOTE(review): this TestCase is named `TestInternalAssertionException` but it
# actually exercises `InvalidSchemaException` — presumably a copy-paste slip
# from the sibling test module. Consider renaming it to
# `TestInvalidSchemaException`; the rename is not applied here to keep the
# public class name stable.
class TestInternalAssertionException(TestCase):
    """Tests for InvalidSchemaException inheritance and message formatting."""

    def test_inheritance(self):
        # The exception must remain catchable as a plain ValueError.
        self.assertTrue(issubclass(InvalidSchemaException, ValueError))

    def test_message(self):
        # Bare message: prefixed with the standard "Invalid JSON Schema:" tag.
        message = "This is an internal assertion error."
        expected_message = f"Invalid JSON Schema: {message}"

        with self.assertRaises(InvalidSchemaException) as ctx:
            raise InvalidSchemaException(message)

        self.assertEqual(str(ctx.exception), expected_message)

    def test_invalid_field(self):
        # invalid_field is appended in a parenthesized suffix.
        message = "This is an internal assertion error."
        invalid_field = "testField"
        expected_message = (
            f"Invalid JSON Schema: {message} (invalid field: {invalid_field})"
        )

        with self.assertRaises(InvalidSchemaException) as ctx:
            raise InvalidSchemaException(message, invalid_field=invalid_field)

        self.assertEqual(str(ctx.exception), expected_message)

    def test_cause(self):
        # cause renders as "(caused by <type>: <message>)" in the final text.
        message = "This is an internal assertion error."
        cause = ValueError("Underlying cause")
        expected_message = (
            f"Invalid JSON Schema: {message} (caused by ValueError: Underlying cause)"
        )

        with self.assertRaises(InvalidSchemaException) as ctx:
            raise InvalidSchemaException(message, cause=cause)

        self.assertEqual(str(ctx.exception), expected_message)

View File

@@ -0,0 +1,31 @@
from jambo.exceptions.unsupported_schema_exception import UnsupportedSchemaException
from unittest import TestCase
class TestUnsupportedSchemaException(TestCase):
    """Unit tests for UnsupportedSchemaException."""

    def test_inheritance(self):
        # Callers must be able to catch it as a plain ValueError.
        self.assertTrue(issubclass(UnsupportedSchemaException, ValueError))

    def test_message(self):
        msg = "This is an internal assertion error."
        expected = f"Unsupported JSON Schema: {msg}"

        with self.assertRaises(UnsupportedSchemaException) as captured:
            raise UnsupportedSchemaException(msg)

        self.assertEqual(str(captured.exception), expected)

    def test_unsupported_field(self):
        msg = "This is an internal assertion error."
        field = "testField"
        expected = f"Unsupported JSON Schema: {msg} (unsupported field: {field})"

        with self.assertRaises(UnsupportedSchemaException) as captured:
            raise UnsupportedSchemaException(msg, unsupported_field=field)

        self.assertEqual(str(captured.exception), expected)

View File

@@ -0,0 +1,362 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser.allof_type_parser import AllOfTypeParser
from pydantic import ValidationError
from unittest import TestCase
class TestAllOfTypeParser(TestCase):
    """Unit tests for AllOfTypeParser's schema-merging behavior."""

    def test_all_of_type_parser_object_type(self):
        """allOf over object schemas merges the sub-schema properties.

        The merged model must enforce the constraints coming from every
        sub-schema at once.
        """
        schema = {
            "type": "object",
            "allOf": [
                {
                    "properties": {
                        "name": {"type": "string", "minLength": 1},
                    },
                },
                {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string", "maxLength": 4},
                        "age": {"type": "integer", "maximum": 100, "minimum": 0},
                    },
                },
            ],
        }
        model, _field_kwargs = AllOfTypeParser().from_properties(
            "placeholder", schema, ref_cache={}
        )

        # Violates the age maximum from the second sub-schema.
        with self.assertRaises(ValidationError):
            model(name="John", age=101)
        # Violates the name minLength from the first sub-schema.
        with self.assertRaises(ValidationError):
            model(name="", age=30)
        # Violates the name maxLength from the second sub-schema.
        with self.assertRaises(ValidationError):
            model(name="John Invalid", age=30)

        instance = model(name="John", age=30)
        self.assertEqual(instance.name, "John")
        self.assertEqual(instance.age, 30)

    def test_all_of_type_parser_object_type_required(self):
        """Required fields from each sub-schema are enforced on the merged model."""
        schema = {
            "type": "object",
            "allOf": [
                {
                    "properties": {"name": {"type": "string"}},
                    "required": ["name"],
                },
                {
                    "type": "object",
                    "properties": {"age": {"type": "integer"}},
                    "required": ["age"],
                },
            ],
        }
        model, _field_kwargs = AllOfTypeParser().from_properties(
            "placeholder", schema, ref_cache={}
        )

        with self.assertRaises(ValidationError):
            model(name="John")  # missing required "age"
        with self.assertRaises(ValidationError):
            model(age=30)  # missing required "name"

        instance = model(name="John", age=30)
        self.assertEqual(instance.name, "John")
        self.assertEqual(instance.age, 30)

    def test_all_of_type_top_level_type(self):
        """Scalar constraints from the sub-schemas are combined on the top-level type."""
        schema = {
            "type": "string",
            "allOf": [
                {"maxLength": 11},
                {"maxLength": 4},
                {"minLength": 1},
                {"minLength": 2},
            ],
        }
        parsed_type, field_kwargs = AllOfTypeParser().from_properties(
            "placeholder", schema, ref_cache={}
        )
        self.assertEqual(parsed_type, str)
        self.assertEqual(field_kwargs["max_length"], 11)
        self.assertEqual(field_kwargs["min_length"], 1)

    def test_all_of_type_parser_in_fields(self):
        """allOf with per-entry types works when used as a plain field schema."""
        schema = {
            "allOf": [
                {"type": "string", "maxLength": 11},
                {"type": "string", "maxLength": 4},
                {"type": "string", "minLength": 1},
                {"type": "string", "minLength": 2},
            ]
        }
        parsed_type, field_kwargs = AllOfTypeParser().from_properties(
            "placeholder", schema, ref_cache={}
        )
        self.assertEqual(parsed_type, str)
        self.assertEqual(field_kwargs["max_length"], 11)
        self.assertEqual(field_kwargs["min_length"], 1)

    def test_invalid_all_of(self):
        """A schema without the 'allOf' key is rejected."""
        schema = {
            "wrongKey": [
                {"type": "string", "maxLength": 11},
                {"type": "string", "maxLength": 4},
                {"type": "string", "minLength": 1},
                {"type": "string", "minLength": 2},
            ]
        }
        with self.assertRaises(InvalidSchemaException):
            AllOfTypeParser().from_properties("placeholder", schema, ref_cache={})

    def test_all_of_invalid_type_not_present(self):
        # No type at the top level and none in the entries: rejected.
        schema = {
            "allOf": [
                {"maxLength": 11},
                {"maxLength": 4},
                {"minLength": 1},
                {"minLength": 2},
            ]
        }
        with self.assertRaises(InvalidSchemaException):
            AllOfTypeParser().from_properties("placeholder", schema, ref_cache={})

    def test_all_of_invalid_type_in_fields(self):
        # Mixed/missing entry types: rejected.
        schema = {
            "allOf": [
                {"type": "string", "maxLength": 11},
                {"type": "integer", "maxLength": 4},
                {"type": "string", "minLength": 1},
                {"minLength": 2},
            ]
        }
        with self.assertRaises(InvalidSchemaException):
            AllOfTypeParser().from_properties("placeholder", schema, ref_cache={})

    def test_all_of_invalid_type_not_all_equal(self):
        """Entries with conflicting 'type' values are rejected."""
        schema = {
            "allOf": [
                {"type": "string", "maxLength": 11},
                {"type": "integer", "maxLength": 4},
                {"type": "string", "minLength": 1},
            ]
        }
        with self.assertRaises(InvalidSchemaException):
            AllOfTypeParser().from_properties("placeholder", schema, ref_cache={})

    def test_all_of_description_field(self):
        """Descriptions from every sub-schema are joined with ' | '."""
        schema = {
            "type": "object",
            "allOf": [
                {"properties": {"name": {"type": "string", "description": "One"}}},
                {"properties": {"name": {"type": "string", "description": "Of"}}},
                {"properties": {"name": {"type": "string", "description": "Us"}}},
            ],
        }
        model, _ = AllOfTypeParser().from_properties(
            "placeholder", schema, ref_cache={}
        )
        self.assertEqual(
            model.model_json_schema()["properties"]["name"]["description"],
            "One | Of | Us",
        )

    def test_all_of_with_defaults(self):
        """Identical defaults across sub-schemas survive the merge."""
        schema = {
            "type": "object",
            "allOf": [
                {
                    "properties": {
                        "name": {"type": "string", "default": "John"},
                    },
                },
                {
                    "properties": {
                        "name": {"type": "string", "default": "John"},
                        "age": {"type": "integer", "default": 30},
                    },
                },
            ],
        }
        model, _ = AllOfTypeParser().from_properties(
            "placeholder", schema, ref_cache={}
        )
        instance = model()
        self.assertEqual(instance.name, "John")
        self.assertEqual(instance.age, 30)

    def test_all_of_with_conflicting_defaults(self):
        """Conflicting defaults for the same property are rejected."""
        schema = {
            "type": "object",
            "allOf": [
                {"properties": {"name": {"type": "string", "default": "John"}}},
                {"properties": {"name": {"type": "string", "default": "Doe"}}},
            ],
        }
        with self.assertRaises(InvalidSchemaException):
            AllOfTypeParser().from_properties("placeholder", schema, ref_cache={})

    def test_all_of_with_root_examples(self):
        """Root-level examples are converted into instances of the merged model."""
        schema = {
            "type": "object",
            "allOf": [
                {"properties": {"name": {"type": "string", "minLength": 1}}},
                {"properties": {"name": {"type": "string", "maxLength": 4}}},
            ],
            "examples": [
                {"name": "John"},
                {"name": "Jane"},
                {"name": "Doe"},
                {"name": "Jack"},
            ],
        }
        model, field_kwargs = AllOfTypeParser().from_properties(
            "placeholder", schema, ref_cache={}
        )
        self.assertEqual(
            field_kwargs["examples"],
            [
                model(name="John"),
                model(name="Jane"),
                model(name="Doe"),
                model(name="Jack"),
            ],
        )

View File

@@ -0,0 +1,143 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser.anyof_type_parser import AnyOfTypeParser
from typing_extensions import Annotated, Union, get_args, get_origin
from unittest import TestCase
class TestAnyOfTypeParser(TestCase):
    """Unit tests for AnyOfTypeParser union-building behavior."""

    def _assert_str_int_union(self, union_type):
        # Helper: the parsed type must be Union[Annotated[str, ...], Annotated[int, ...]].
        self.assertEqual(get_origin(union_type), Union)
        first, second = get_args(union_type)
        self.assertEqual(get_origin(first), Annotated)
        self.assertIn(str, get_args(first))
        self.assertEqual(get_origin(second), Annotated)
        self.assertIn(int, get_args(second))

    def test_any_with_missing_properties(self):
        # The "anyOf" key is mandatory.
        schema = {
            "notAnyOf": [
                {"type": "string"},
                {"type": "integer"},
            ],
        }
        with self.assertRaises(InvalidSchemaException):
            AnyOfTypeParser().from_properties("placeholder", schema)

    def test_any_of_with_invalid_properties(self):
        # "anyOf" must be a list of schemas, not None.
        with self.assertRaises(InvalidSchemaException):
            AnyOfTypeParser().from_properties("placeholder", {"anyOf": None})

    def test_any_of_string_or_int(self):
        """anyOf over scalar types is parsed to a Union of annotated members."""
        schema = {
            "anyOf": [
                {"type": "string"},
                {"type": "integer"},
            ],
        }
        parsed_type, _ = AnyOfTypeParser().from_properties(
            "placeholder", schema, required=True
        )
        self._assert_str_int_union(parsed_type)

    def test_any_of_string_or_int_with_default(self):
        """A default matching one union member is carried through to the field."""
        schema = {
            "anyOf": [
                {"type": "string"},
                {"type": "integer"},
            ],
            "default": 42,
        }
        parsed_type, field_kwargs = AnyOfTypeParser().from_properties(
            "placeholder", schema
        )
        self._assert_str_int_union(parsed_type)
        self.assertEqual(field_kwargs["default"], 42)

    def test_any_string_or_int_with_invalid_defaults(self):
        """A default that matches no union member is rejected."""
        schema = {
            "anyOf": [
                {"type": "string"},
                {"type": "integer"},
            ],
            "default": 3.14,
        }
        with self.assertRaises(InvalidSchemaException):
            AnyOfTypeParser().from_properties("placeholder", schema)

    def test_anyof_with_examples(self):
        """Per-member examples end up on the matching member annotation."""
        schema = {
            "anyOf": [
                {"type": "string", "examples": ["example string"]},
                {"type": "integer", "examples": [123]},
            ],
        }
        parsed_type, _ = AnyOfTypeParser().from_properties("placeholder", schema)
        first, second = get_args(parsed_type)
        self.assertEqual(get_args(first)[1].examples, ["example string"])
        self.assertEqual(get_args(second)[1].examples, [123])

    def test_any_of_with_root_examples(self):
        """Root-level examples are kept on the field validator untouched."""
        schema = {
            "anyOf": [
                {"type": "string"},
                {"type": "integer"},
            ],
            "examples": ["100", 100],
        }
        _, field_kwargs = AnyOfTypeParser().from_properties("placeholder", schema)
        self.assertEqual(field_kwargs["examples"], ["100", 100])

View File

@@ -0,0 +1,127 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser import ArrayTypeParser
from typing_extensions import get_args
from unittest import TestCase
class TestArrayTypeParser(TestCase):
    """Unit tests for ArrayTypeParser."""

    def test_array_parser_no_options(self):
        parsed_type, _ = ArrayTypeParser().from_properties(
            "placeholder", {"items": {"type": "string"}}
        )
        self.assertEqual(parsed_type.__origin__, list)
        self.assertEqual(get_args(parsed_type)[0], str)

    def test_array_parser_with_no_items(self):
        # "items" is mandatory for array schemas.
        schema = {
            "default": ["a", "b", "c", "d"],
            "maxItems": 3,
        }
        with self.assertRaises(InvalidSchemaException):
            ArrayTypeParser().from_properties("placeholder", schema)

    def test_array_parser_with_options_unique(self):
        # uniqueItems switches the container type from list to set.
        parsed_type, _ = ArrayTypeParser().from_properties(
            "placeholder", {"items": {"type": "string"}, "uniqueItems": True}
        )
        self.assertEqual(parsed_type.__origin__, set)

    def test_array_parser_with_options_max_min(self):
        parsed_type, field_kwargs = ArrayTypeParser().from_properties(
            "placeholder",
            {"items": {"type": "string"}, "maxItems": 10, "minItems": 1},
        )
        self.assertEqual(parsed_type.__origin__, list)
        self.assertEqual(field_kwargs["max_length"], 10)
        self.assertEqual(field_kwargs["min_length"], 1)

    def test_array_parser_with_options_default_list(self):
        parsed_type, field_kwargs = ArrayTypeParser().from_properties(
            "placeholder", {"items": {"type": "string"}, "default": ["a", "b", "c"]}
        )
        self.assertEqual(parsed_type.__origin__, list)
        # Defaults are exposed through a factory, never as a shared mutable value.
        self.assertEqual(field_kwargs["default_factory"](), ["a", "b", "c"])

    def test_array_parse_with_options_default_set(self):
        schema = {
            "items": {"type": "string"},
            "uniqueItems": True,
            "default": ["a", "b", "c"],
        }
        parsed_type, field_kwargs = ArrayTypeParser().from_properties(
            "placeholder", schema
        )
        self.assertEqual(parsed_type.__origin__, set)
        self.assertEqual(field_kwargs["default_factory"](), {"a", "b", "c"})

    def test_array_parser_with_invalid_default_elem_type(self):
        # One element of the default has the wrong type.
        with self.assertRaises(InvalidSchemaException):
            ArrayTypeParser().from_properties(
                "placeholder", {"items": {"type": "string"}, "default": ["a", 1, "c"]}
            )

    def test_array_parser_with_invalid_default_type(self):
        # Default must be a collection, not a scalar.
        with self.assertRaises(InvalidSchemaException):
            ArrayTypeParser().from_properties(
                "placeholder", properties={"items": {"type": "string"}, "default": 0}
            )

    def test_array_parser_with_invalid_default_min(self):
        # Default shorter than minItems.
        with self.assertRaises(InvalidSchemaException):
            ArrayTypeParser().from_properties(
                "placeholder",
                {"items": {"type": "string"}, "default": ["a"], "minItems": 2},
            )

    def test_array_parser_with_invalid_default_max(self):
        # Default longer than maxItems.
        schema = {
            "items": {"type": "string"},
            "default": ["a", "b", "c", "d"],
            "maxItems": 3,
        }
        with self.assertRaises(InvalidSchemaException):
            ArrayTypeParser().from_properties("placeholder", schema)

    def test_array_parser_with_examples(self):
        schema = {
            "items": {"type": "integer"},
            "examples": [
                [1, 2, 3],
                [4, 5, 6],
            ],
        }
        parsed_type, field_kwargs = ArrayTypeParser().from_properties(
            "placeholder", schema
        )
        self.assertEqual(parsed_type.__origin__, list)
        self.assertEqual(field_kwargs["examples"], [[1, 2, 3], [4, 5, 6]])

View File

@@ -0,0 +1,60 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser import BooleanTypeParser
from unittest import TestCase
class TestBoolTypeParser(TestCase):
    """Unit tests for BooleanTypeParser."""

    def test_bool_parser_no_options(self):
        parsed_type, field_kwargs = BooleanTypeParser().from_properties_impl(
            "placeholder", {"type": "boolean"}
        )
        self.assertEqual(parsed_type, bool)
        self.assertEqual(field_kwargs, {"default": None})

    def test_bool_parser_with_default(self):
        parsed_type, field_kwargs = BooleanTypeParser().from_properties_impl(
            "placeholder", {"type": "boolean", "default": True}
        )
        self.assertEqual(parsed_type, bool)
        self.assertEqual(field_kwargs["default"], True)

    def test_bool_parser_with_invalid_default(self):
        # A non-boolean default must be rejected.
        with self.assertRaises(InvalidSchemaException):
            BooleanTypeParser().from_properties_impl(
                "placeholder", {"type": "boolean", "default": "invalid"}
            )

    def test_bool_parser_with_examples(self):
        parsed_type, field_kwargs = BooleanTypeParser().from_properties_impl(
            "placeholder", {"type": "boolean", "examples": [True, False]}
        )
        self.assertEqual(parsed_type, bool)
        self.assertEqual(field_kwargs["default"], None)
        self.assertEqual(field_kwargs["examples"], [True, False])

View File

@@ -0,0 +1,107 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser import ConstTypeParser
from typing_extensions import Annotated, Literal, get_args, get_origin
from unittest import TestCase
class TestConstTypeParser(TestCase):
    """Unit tests for ConstTypeParser."""

    def _parse_const(self, field_name, value):
        # Helper: parse a schema whose const and single example are `value`.
        return ConstTypeParser().from_properties_impl(
            field_name, {"const": value, "examples": [value]}
        )

    def test_const_type_parser_hashable_value(self):
        """Hashable const values are represented as a Literal type."""
        value = "United States of America"
        parsed_type, parsed_props = self._parse_const("country", value)
        self.assertEqual(get_origin(parsed_type), Literal)
        self.assertEqual(get_args(parsed_type), (value,))
        self.assertEqual(parsed_props["default"], value)
        self.assertEqual(parsed_props["examples"], [value])

    def test_const_type_parser_non_hashable_value(self):
        """Non-hashable const values fall back to Annotated with a validator."""
        value = [1, 2, 3]  # lists are not hashable, so Literal cannot be used
        parsed_type, parsed_props = self._parse_const("list_const", value)
        self.assertEqual(get_origin(parsed_type), Annotated)
        self.assertIn(list, get_args(parsed_type))
        self.assertEqual(parsed_props["default"], value)
        self.assertEqual(parsed_props["examples"], [value])

    def test_const_type_parser_integer_value(self):
        """Integer const values are hashable and use Literal."""
        value = 42
        parsed_type, parsed_props = self._parse_const("int_const", value)
        self.assertEqual(get_origin(parsed_type), Literal)
        self.assertEqual(get_args(parsed_type), (value,))
        self.assertEqual(parsed_props["default"], value)
        self.assertEqual(parsed_props["examples"], [value])

    def test_const_type_parser_boolean_value(self):
        """Boolean const values are hashable and use Literal."""
        value = True
        parsed_type, parsed_props = self._parse_const("bool_const", value)
        self.assertEqual(get_origin(parsed_type), Literal)
        self.assertEqual(get_args(parsed_type), (value,))
        self.assertEqual(parsed_props["default"], value)
        self.assertEqual(parsed_props["examples"], [value])

    def test_const_type_parser_invalid_properties(self):
        # Missing the "const" key entirely.
        with self.assertRaises(InvalidSchemaException) as context:
            ConstTypeParser().from_properties_impl(
                "invalid_country", {"notConst": "United States of America"}
            )
        self.assertIn(
            "Const type invalid_country must have 'const' property defined",
            str(context.exception),
        )

    def test_const_type_parser_invalid_const_value(self):
        # Unsupported const value type (a dict).
        with self.assertRaises(InvalidSchemaException) as context:
            ConstTypeParser().from_properties_impl("invalid_country", {"const": {}})
        self.assertIn(
            "Const type invalid_country must have 'const' value of allowed types",
            str(context.exception),
        )

View File

@@ -0,0 +1,115 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser import EnumTypeParser
from enum import Enum
from unittest import TestCase
class TestEnumTypeParser(TestCase):
    """Unit tests for EnumTypeParser."""

    def test_enum_type_parser_throws_enum_not_defined(self):
        # The "enum" key is mandatory.
        with self.assertRaises(InvalidSchemaException):
            EnumTypeParser().from_properties_impl("TestEnum", {})

    def test_enum_type_parser_throws_enum_not_list(self):
        # "enum" must be a list of member values.
        with self.assertRaises(InvalidSchemaException):
            EnumTypeParser().from_properties_impl("TestEnum", {"enum": "not_a_list"})

    def test_enum_type_parser_creates_enum(self):
        parsed_enum, parsed_props = EnumTypeParser().from_properties_impl(
            "TestEnum", {"enum": ["value1", "value2", "value3"]}
        )
        self.assertIsInstance(parsed_enum, type)
        self.assertTrue(issubclass(parsed_enum, Enum))
        # Member names are the upper-cased values.
        self.assertEqual(
            set(parsed_enum.__members__.keys()), {"VALUE1", "VALUE2", "VALUE3"}
        )
        self.assertEqual(parsed_props, {"default": None})

    def test_enum_type_parser_creates_enum_with_default(self):
        schema = {
            "enum": ["value1", "value2", "value3"],
            "default": "value2",
        }
        parsed_enum, parsed_props = EnumTypeParser().from_properties_impl(
            "TestEnum", schema
        )
        self.assertIsInstance(parsed_enum, type)
        self.assertTrue(issubclass(parsed_enum, Enum))
        self.assertEqual(
            set(parsed_enum.__members__.keys()), {"VALUE1", "VALUE2", "VALUE3"}
        )
        # The default is converted into an enum member.
        self.assertEqual(parsed_props["default"].value, "value2")

    def test_enum_type_parser_throws_invalid_default(self):
        schema = {
            "enum": ["value1", "value2", "value3"],
            "default": "invalid_value",
        }
        with self.assertRaises(ValueError):
            EnumTypeParser().from_properties_impl("TestEnum", schema)

    def test_enum_type_parser_throws_invalid_enum_value(self):
        # Non-primitive member values (e.g. a dict) are rejected.
        with self.assertRaises(InvalidSchemaException):
            EnumTypeParser().from_properties_impl(
                "TestEnum", {"enum": ["value1", 42, dict()]}
            )

    def test_enum_type_parser_creates_enum_with_examples(self):
        schema = {
            "enum": ["value1", "value2", "value3"],
            "examples": ["value1", "value3"],
        }
        parsed_enum, parsed_props = EnumTypeParser().from_properties_impl(
            "TestEnum", schema
        )
        self.assertIsInstance(parsed_enum, type)
        self.assertTrue(issubclass(parsed_enum, Enum))
        self.assertEqual(
            set(parsed_enum.__members__.keys()), {"VALUE1", "VALUE2", "VALUE3"}
        )
        self.assertEqual(parsed_props["default"], None)
        # Examples are converted into enum members as well.
        self.assertEqual(
            parsed_props["examples"],
            [parsed_enum.VALUE1, parsed_enum.VALUE3],
        )

View File

@@ -0,0 +1,138 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser import FloatTypeParser
from unittest import TestCase
class TestFloatTypeParser(TestCase):
    """Unit tests for FloatTypeParser."""

    def _assert_rejected(self, schema):
        # Helper: parsing `schema` must fail with InvalidSchemaException.
        with self.assertRaises(InvalidSchemaException):
            FloatTypeParser().from_properties("placeholder", schema)

    def test_float_parser_no_options(self):
        parsed_type, field_kwargs = FloatTypeParser().from_properties(
            "placeholder", {"type": "number"}
        )
        self.assertEqual(parsed_type, float)
        self.assertEqual(field_kwargs, {"default": None})

    def test_float_parser_with_options(self):
        schema = {
            "type": "number",
            "maximum": 10.5,
            "minimum": 1.0,
            "multipleOf": 0.5,
            "examples": [1.5, 2.5],
        }
        parsed_type, field_kwargs = FloatTypeParser().from_properties(
            "placeholder", schema
        )
        self.assertEqual(parsed_type, float)
        # JSON Schema keywords map onto pydantic Field kwargs.
        self.assertEqual(field_kwargs["le"], 10.5)
        self.assertEqual(field_kwargs["ge"], 1.0)
        self.assertEqual(field_kwargs["multiple_of"], 0.5)
        self.assertEqual(field_kwargs["examples"], [1.5, 2.5])

    def test_float_parser_with_default(self):
        schema = {
            "type": "number",
            "default": 5.0,
            "maximum": 10.5,
            "minimum": 1.0,
            "multipleOf": 0.5,
        }
        parsed_type, field_kwargs = FloatTypeParser().from_properties(
            "placeholder", schema
        )
        self.assertEqual(parsed_type, float)
        self.assertEqual(field_kwargs["default"], 5.0)
        self.assertEqual(field_kwargs["le"], 10.5)
        self.assertEqual(field_kwargs["ge"], 1.0)
        self.assertEqual(field_kwargs["multiple_of"], 0.5)

    def test_float_parser_with_default_invalid_type(self):
        self._assert_rejected(
            {
                "type": "number",
                "default": "invalid",  # not a number
                "maximum": 10.5,
                "minimum": 1.0,
                "multipleOf": 0.5,
            }
        )

    def test_float_parser_with_default_invalid_maximum(self):
        self._assert_rejected(
            {
                "type": "number",
                "default": 15.0,  # above maximum
                "maximum": 10.5,
                "minimum": 1.0,
                "multipleOf": 0.5,
            }
        )

    def test_float_parser_with_default_invalid_minimum(self):
        self._assert_rejected(
            {
                "type": "number",
                "default": -5.0,  # below minimum
                "maximum": 10.5,
                "minimum": 1.0,
                "multipleOf": 0.5,
            }
        )

    def test_float_parser_with_default_invalid_exclusive_maximum(self):
        self._assert_rejected(
            {
                "type": "number",
                "default": 10.5,  # equal to the exclusive bound
                "exclusiveMaximum": 10.5,
                "minimum": 1.0,
                "multipleOf": 0.5,
            }
        )

    def test_float_parser_with_default_invalid_exclusive_minimum(self):
        self._assert_rejected(
            {
                "type": "number",
                "default": 1.0,  # equal to the exclusive bound
                "maximum": 10.5,
                "exclusiveMinimum": 1.0,
                "multipleOf": 0.5,
            }
        )

    def test_float_parser_with_default_invalid_multiple(self):
        self._assert_rejected(
            {
                "type": "number",
                "default": 5.0,  # not a multiple of 2.0
                "maximum": 10.5,
                "minimum": 1.0,
                "multipleOf": 2.0,
            }
        )

View File

@@ -0,0 +1,138 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser import IntTypeParser
from unittest import TestCase
class TestIntTypeParser(TestCase):
    """Unit tests for IntTypeParser."""

    def _assert_rejected(self, schema):
        # Helper: parsing `schema` must fail with InvalidSchemaException.
        with self.assertRaises(InvalidSchemaException):
            IntTypeParser().from_properties("placeholder", schema)

    def test_int_parser_no_options(self):
        parsed_type, field_kwargs = IntTypeParser().from_properties(
            "placeholder", {"type": "integer"}
        )
        self.assertEqual(parsed_type, int)
        self.assertEqual(field_kwargs, {"default": None})

    def test_int_parser_with_options(self):
        schema = {
            "type": "integer",
            "maximum": 10,
            "minimum": 1,
            "multipleOf": 2,
            "examples": [2, 4],
        }
        parsed_type, field_kwargs = IntTypeParser().from_properties(
            "placeholder", schema
        )
        self.assertEqual(parsed_type, int)
        # JSON Schema keywords map onto pydantic Field kwargs.
        self.assertEqual(field_kwargs["le"], 10)
        self.assertEqual(field_kwargs["ge"], 1)
        self.assertEqual(field_kwargs["multiple_of"], 2)
        self.assertEqual(field_kwargs["examples"], [2, 4])

    def test_int_parser_with_default(self):
        schema = {
            "type": "integer",
            "default": 6,
            "maximum": 10,
            "minimum": 1,
            "multipleOf": 2,
        }
        parsed_type, field_kwargs = IntTypeParser().from_properties(
            "placeholder", schema
        )
        self.assertEqual(parsed_type, int)
        self.assertEqual(field_kwargs["default"], 6)
        self.assertEqual(field_kwargs["le"], 10)
        self.assertEqual(field_kwargs["ge"], 1)
        self.assertEqual(field_kwargs["multiple_of"], 2)

    def test_int_parser_with_default_invalid_type(self):
        self._assert_rejected(
            {
                "type": "integer",
                "default": "invalid",  # not an integer
                "maximum": 10,
                "minimum": 1,
                "multipleOf": 2,
            }
        )

    def test_int_parser_with_default_invalid_maximum(self):
        self._assert_rejected(
            {
                "type": "integer",
                "default": 15,  # above maximum
                "maximum": 10,
                "minimum": 1,
                "multipleOf": 2,
            }
        )

    def test_int_parser_with_default_invalid_minimum(self):
        self._assert_rejected(
            {
                "type": "integer",
                "default": -5,  # below minimum
                "maximum": 10,
                "minimum": 1,
                "multipleOf": 2,
            }
        )

    def test_int_parser_with_default_invalid_exclusive_maximum(self):
        self._assert_rejected(
            {
                "type": "integer",
                "default": 10,  # equal to the exclusive bound
                "exclusiveMaximum": 10,
                "minimum": 1,
                "multipleOf": 2,
            }
        )

    def test_int_parser_with_default_invalid_exclusive_minimum(self):
        self._assert_rejected(
            {
                "type": "integer",
                "default": 1,  # equal to the exclusive bound
                "exclusiveMinimum": 1,
                "maximum": 10,
                "multipleOf": 2,
            }
        )

    def test_int_parser_with_default_invalid_multipleOf(self):
        self._assert_rejected(
            {
                "type": "integer",
                "default": 5,  # not a multiple of 2
                "maximum": 10,
                "minimum": 1,
                "multipleOf": 2,
            }
        )

View File

@@ -0,0 +1,45 @@
from jambo.parser import NullTypeParser
from unittest import TestCase
class TestNullTypeParser(TestCase):
    """Unit tests for NullTypeParser."""

    def test_null_parser_no_options(self):
        parsed_type, field_kwargs = NullTypeParser().from_properties_impl(
            "placeholder", {"type": "null"}
        )
        self.assertEqual(parsed_type, type(None))
        self.assertEqual(field_kwargs, {"default": None})

    def test_null_parser_with_examples(self):
        parsed_type, field_kwargs = NullTypeParser().from_properties_impl(
            "placeholder", {"type": "null", "examples": [None]}
        )
        self.assertEqual(parsed_type, type(None))
        self.assertEqual(field_kwargs["default"], None)
        self.assertEqual(field_kwargs["examples"], [None])

    def test_null_parser_with_invalid_default(self):
        # A non-null default is ignored (reset to None) rather than rejected.
        parsed_type, field_kwargs = NullTypeParser().from_properties_impl(
            "placeholder", {"type": "null", "default": "invalid"}
        )
        self.assertEqual(parsed_type, type(None))
        self.assertEqual(field_kwargs, {"default": None})

View File

@@ -0,0 +1,109 @@
from jambo.exceptions import InternalAssertionException
from jambo.parser import ObjectTypeParser
from unittest import TestCase
class TestObjectTypeParser(TestCase):
    """Unit tests for ObjectTypeParser."""

    def test_object_type_parser_throws_without_ref_cache(self):
        # ref_cache is mandatory; omitting it is an internal usage error.
        schema = {
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
            },
        }
        with self.assertRaises(InternalAssertionException):
            ObjectTypeParser().from_properties_impl("placeholder", schema)

    def test_object_type_parser(self):
        schema = {
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
            },
        }
        Model, _args = ObjectTypeParser().from_properties_impl(
            "placeholder", schema, ref_cache={}
        )
        instance = Model(name="name", age=10)
        self.assertEqual(instance.name, "name")
        self.assertEqual(instance.age, 10)

    def test_object_type_parser_with_object_example(self):
        schema = {
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
            },
            "examples": [
                {
                    "name": "example_name",
                    "age": 30,
                }
            ],
        }
        _, field_kwargs = ObjectTypeParser().from_properties_impl(
            "placeholder", schema, ref_cache={}
        )
        # Dict examples are converted into model instances.
        example = field_kwargs["examples"][0]
        self.assertEqual(example.name, "example_name")
        self.assertEqual(example.age, 30)

    def test_object_type_parser_with_default(self):
        schema = {
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
            },
            "default": {
                "name": "default_name",
                "age": 20,
            },
        }
        _, field_kwargs = ObjectTypeParser().from_properties_impl(
            "placeholder", schema, ref_cache={}
        )
        # The factory produces the configured default values...
        default_obj = field_kwargs["default_factory"]()
        self.assertEqual(default_obj.name, "default_name")
        self.assertEqual(default_obj.age, 20)
        # ...and a fresh object on each call (no shared mutable default).
        new_obj = field_kwargs["default_factory"]()
        self.assertNotEqual(id(default_obj), id(new_obj))

    def test_object_type_parser_warns_if_object_override_in_cache(self):
        ref_cache = {}
        parser = ObjectTypeParser()
        schema = {"type": "object", "properties": {}}
        # Parsing the same named object twice against one cache replaces the
        # cached model, which must emit a UserWarning.
        with self.assertWarns(UserWarning):
            _, field_kwargs = parser.from_properties_impl(
                "placeholder", schema, ref_cache=ref_cache
            )
            _, field_kwargs = parser.from_properties_impl(
                "placeholder", schema, ref_cache=ref_cache
            )

View File

@@ -0,0 +1,602 @@
from jambo import SchemaConverter
from jambo.exceptions import InvalidSchemaException
from jambo.parser.oneof_type_parser import OneOfTypeParser
from pydantic import ValidationError
from unittest import TestCase
class TestOneOfTypeParser(TestCase):
def test_oneof_raises_on_invalid_property(self):
with self.assertRaises(InvalidSchemaException):
OneOfTypeParser().from_properties_impl(
"test_field",
{
# Invalid schema, should have property "oneOf"
},
required=True,
context={},
ref_cache={},
)
with self.assertRaises(InvalidSchemaException):
OneOfTypeParser().from_properties_impl(
"test_field",
{
"oneOf": [], # should throw because oneOf must be a list with at least one item
},
required=True,
context={},
ref_cache={},
)
with self.assertRaises(InvalidSchemaException):
SchemaConverter.build(
{
"title": "Test",
"type": "object",
"properties": {
"value": {
"oneOf": [], # should throw because oneOf requires at least one schema
}
},
}
)
def test_oneof_basic_integer_and_string(self):
schema = {
"title": "Person",
"description": "A person with an ID that can be either an integer or a formatted string",
"type": "object",
"properties": {
"id": {
"oneOf": [
{"type": "integer", "minimum": 1},
{"type": "string", "pattern": "^[A-Z]{2}[0-9]{4}$"},
]
},
},
"required": ["id"],
}
Model = SchemaConverter.build(schema)
obj1 = Model(id=123)
self.assertEqual(obj1.id, 123)
obj2 = Model(id="AB1234")
self.assertEqual(obj2.id, "AB1234")
def test_oneof_validation_failures(self):
schema = {
"title": "Person",
"type": "object",
"properties": {
"id": {
"oneOf": [
{"type": "integer", "minimum": 1},
{"type": "string", "pattern": "^[A-Z]{2}[0-9]{4}$"},
]
},
},
"required": ["id"],
}
Model = SchemaConverter.build(schema)
with self.assertRaises(ValidationError):
Model(id=-5)
with self.assertRaises(ValidationError):
Model(id="invalid")
with self.assertRaises(ValidationError):
Model(id=123.45)
def test_oneof_with_conflicting_schemas(self):
schema = {
"title": "Value",
"type": "object",
"properties": {
"data": {
"oneOf": [
{"type": "number", "multipleOf": 2},
{"type": "number", "multipleOf": 3},
]
},
},
"required": ["data"],
}
Model = SchemaConverter.build(schema)
obj1 = Model(data=4)
self.assertEqual(obj1.data, 4)
obj2 = Model(data=9)
self.assertEqual(obj2.data, 9)
with self.assertRaises(ValidationError) as cm:
Model(data=6)
self.assertIn("matches multiple oneOf schemas", str(cm.exception))
with self.assertRaises(ValidationError):
Model(data=5)
def test_oneof_with_objects(self):
    """oneOf over closed object shapes: exactly one sub-schema must match."""
    schema = {
        "title": "Contact",
        "type": "object",
        "properties": {
            "contact_info": {
                "oneOf": [
                    {
                        "type": "object",
                        "properties": {
                            "email": {"type": "string", "format": "email"}
                        },
                        "required": ["email"],
                        "additionalProperties": False,
                    },
                    {
                        "type": "object",
                        "properties": {
                            "phone": {"type": "string", "pattern": "^[0-9-]+$"}
                        },
                        "required": ["phone"],
                        "additionalProperties": False,
                    },
                ]
            },
        },
        "required": ["contact_info"],
    }
    Model = SchemaConverter.build(schema)
    obj1 = Model(contact_info={"email": "user@example.com"})
    self.assertEqual(obj1.contact_info.email, "user@example.com")
    obj2 = Model(contact_info={"phone": "123-456-7890"})
    self.assertEqual(obj2.contact_info.phone, "123-456-7890")
    # Both keys present: each branch forbids the other key via
    # additionalProperties=False, so neither branch matches
    with self.assertRaises(ValidationError):
        Model(contact_info={"email": "user@example.com", "phone": "123-456-7890"})
def test_oneof_with_discriminator_basic(self):
    """Discriminator routes validation to the branch whose const tag matches."""
    schema = {
        "title": "Pet",
        "type": "object",
        "properties": {
            "pet": {
                "oneOf": [
                    {
                        "type": "object",
                        "properties": {
                            "type": {"const": "cat"},
                            "meows": {"type": "boolean"},
                        },
                        "required": ["type", "meows"],
                    },
                    {
                        "type": "object",
                        "properties": {
                            "type": {"const": "dog"},
                            "barks": {"type": "boolean"},
                        },
                        "required": ["type", "barks"],
                    },
                ],
                "discriminator": {"propertyName": "type"},
            }
        },
        "required": ["pet"],
    }
    Model = SchemaConverter.build(schema)
    cat = Model(pet={"type": "cat", "meows": True})
    self.assertEqual(cat.pet.type, "cat")
    self.assertEqual(cat.pet.meows, True)
    dog = Model(pet={"type": "dog", "barks": False})
    self.assertEqual(dog.pet.type, "dog")
    self.assertEqual(dog.pet.barks, False)
    # Tag says "cat" but payload carries dog fields -> selected branch fails
    with self.assertRaises(ValidationError):
        Model(pet={"type": "cat", "barks": True})
    # Unknown tag value -> no branch to dispatch to
    with self.assertRaises(ValidationError):
        Model(pet={"type": "bird", "flies": True})
def test_oneof_with_invalid_types(self):
    """A discriminator requires object sub-schemas; scalar branches are rejected at build time."""
    with self.assertRaises(InvalidSchemaException):
        SchemaConverter.build(
            {
                "title": "Pet",
                "type": "object",
                "properties": {
                    "pet": {
                        "oneOf": [
                            {
                                "type": "number",
                            },
                            {
                                "type": "string",
                            },
                        ],
                        # discriminator cannot dispatch on non-object branches
                        "discriminator": {"propertyName": "type"},
                    }
                },
                "required": ["pet"],
            }
        )
def test_oneof_with_discriminator_mapping(self):
    """Discriminator with an explicit mapping of tag values to JSON pointers."""
    schema = {
        "title": "Vehicle",
        "type": "object",
        "properties": {
            "vehicle": {
                "oneOf": [
                    {
                        "type": "object",
                        "properties": {
                            "vehicle_type": {"const": "car"},
                            "doors": {
                                "type": "integer",
                                "minimum": 2,
                                "maximum": 4,
                            },
                        },
                        "required": ["vehicle_type", "doors"],
                    },
                    {
                        "type": "object",
                        "properties": {
                            "vehicle_type": {"const": "motorcycle"},
                            "engine_size": {"type": "number", "minimum": 125},
                        },
                        "required": ["vehicle_type", "engine_size"],
                    },
                ],
                # mapping points each tag value at a branch inside this document
                "discriminator": {
                    "propertyName": "vehicle_type",
                    "mapping": {
                        "car": "#/properties/vehicle/oneOf/0",
                        "motorcycle": "#/properties/vehicle/oneOf/1",
                    },
                },
            }
        },
        "required": ["vehicle"],
    }
    Model = SchemaConverter.build(schema)
    car = Model(vehicle={"vehicle_type": "car", "doors": 4})
    self.assertEqual(car.vehicle.vehicle_type, "car")
    self.assertEqual(car.vehicle.doors, 4)
    motorcycle = Model(vehicle={"vehicle_type": "motorcycle", "engine_size": 600.0})
    self.assertEqual(motorcycle.vehicle.vehicle_type, "motorcycle")
    self.assertEqual(motorcycle.vehicle.engine_size, 600.0)
def test_oneof_with_discriminator_invalid_values(self):
    """Discriminated unions reject unknown tags, mismatched payloads, and missing tags."""
    schema = {
        "title": "Shape",
        "type": "object",
        "properties": {
            "shape": {
                "oneOf": [
                    {
                        "type": "object",
                        "properties": {
                            "type": {"const": "circle"},
                            "radius": {"type": "number", "minimum": 0},
                        },
                        "required": ["type", "radius"],
                    },
                    {
                        "type": "object",
                        "properties": {
                            "type": {"const": "square"},
                            "side": {"type": "number", "minimum": 0},
                        },
                        "required": ["type", "side"],
                    },
                ],
                "discriminator": {"propertyName": "type"},
            }
        },
        "required": ["shape"],
    }
    Model = SchemaConverter.build(schema)
    # Unknown tag value
    with self.assertRaises(ValidationError):
        Model(shape={"type": "triangle", "base": 5, "height": 3})
    # Tag "circle" but payload has the square branch's field
    with self.assertRaises(ValidationError):
        Model(shape={"type": "circle", "side": 5})
    # Discriminator property missing entirely
    with self.assertRaises(ValidationError):
        Model(shape={"radius": 5})
def test_oneof_missing_properties(self):
    """A property using an unknown combinator key instead of oneOf is rejected."""
    with self.assertRaises(InvalidSchemaException):
        SchemaConverter.build(
            {
                "title": "Test",
                "type": "object",
                "properties": {
                    "value": {
                        "notOneOf": [
                            {"type": "string"},
                            {"type": "integer"},
                        ]
                    },
                },
            }
        )
def test_oneof_invalid_properties(self):
    """oneOf must be a list of schemas; None is rejected at build time."""
    with self.assertRaises(InvalidSchemaException):
        SchemaConverter.build(
            {
                "title": "Test",
                "type": "object",
                "properties": {
                    "value": {"oneOf": None},
                },
            }
        )
def test_oneof_with_default_value(self):
    """A oneOf field with a valid default becomes optional and falls back to it."""
    schema = {
        "title": "Test",
        "type": "object",
        "properties": {
            "value": {
                "oneOf": [
                    {"type": "string"},
                    {"type": "integer"},
                ],
                "default": "test",
            },
        },
    }
    model_cls = SchemaConverter.build(schema)
    # No argument supplied: the default from the schema is used
    instance = model_cls()
    self.assertEqual(instance.value, "test")
def test_oneof_with_invalid_default_value(self):
    """A default that satisfies no oneOf branch is rejected at build time."""
    schema = {
        "title": "Test",
        "type": "object",
        "properties": {
            "value": {
                "oneOf": [
                    {"type": "string", "minLength": 5},
                    {"type": "integer", "minimum": 10},
                ],
                # "hi" is too short for the string branch and not an integer
                "default": "hi",
            },
        },
    }
    with self.assertRaises(InvalidSchemaException):
        SchemaConverter.build(schema)
def test_oneof_discriminator_without_property_name(self):
    """A discriminator object missing propertyName is an invalid schema."""
    # Should throw because the spec determines propertyName is required for discriminator
    with self.assertRaises(InvalidSchemaException):
        SchemaConverter.build(
            {
                "title": "Test",
                "type": "object",
                "properties": {
                    "value": {
                        "oneOf": [
                            {
                                "type": "object",
                                "properties": {
                                    "type": {"const": "a"},
                                    "value": {"type": "string"},
                                },
                            },
                            {
                                "type": "object",
                                "properties": {
                                    "type": {"const": "b"},
                                    "value": {"type": "integer"},
                                },
                            },
                        ],
                        "discriminator": {},  # discriminator without propertyName
                    }
                },
            }
        )
def test_oneof_discriminator_with_invalid_discriminator(self):
    """A discriminator that is not an object at all is an invalid schema."""
    # Should throw because a valid discriminator is required
    with self.assertRaises(InvalidSchemaException):
        SchemaConverter.build(
            {
                "title": "Test",
                "type": "object",
                "properties": {
                    "value": {
                        "oneOf": [
                            {
                                "type": "object",
                                "properties": {
                                    "type": {"const": "a"},
                                    "value": {"type": "string"},
                                },
                            },
                            {
                                "type": "object",
                                "properties": {
                                    "type": {"const": "b"},
                                    "value": {"type": "integer"},
                                },
                            },
                        ],
                        "discriminator": "invalid",  # discriminator must be an object, not a string
                    }
                },
            }
        )
def test_oneof_overlapping_strings_from_docs(self):
    """Test the overlapping strings example from documentation"""
    schema = {
        "title": "SimpleExample",
        "type": "object",
        "properties": {
            "value": {
                "oneOf": [
                    {"type": "string", "maxLength": 6},
                    {"type": "string", "minLength": 4},
                ]
            }
        },
        "required": ["value"],
    }
    Model = SchemaConverter.build(schema)
    # Valid: Short string (matches first schema only)
    obj1 = Model(value="hi")
    self.assertEqual(obj1.value, "hi")
    # Valid: Long string (matches second schema only)
    obj2 = Model(value="very long string")
    self.assertEqual(obj2.value, "very long string")
    # Invalid: Medium string (matches BOTH schemas - violates oneOf)
    with self.assertRaises(ValidationError) as cm:
        Model(value="hello")  # 5 chars: matches maxLength=6 AND minLength=4
    # The error message identifies the exclusivity violation explicitly
    self.assertIn("matches multiple oneOf schemas", str(cm.exception))
def test_oneof_shapes_discriminator_from_docs(self):
    """Test the shapes discriminator example from documentation"""
    schema = {
        "title": "Shape",
        "type": "object",
        "properties": {
            "shape": {
                "oneOf": [
                    {
                        "type": "object",
                        "properties": {
                            "type": {"const": "circle"},
                            "radius": {"type": "number", "minimum": 0},
                        },
                        "required": ["type", "radius"],
                    },
                    {
                        "type": "object",
                        "properties": {
                            "type": {"const": "rectangle"},
                            "width": {"type": "number", "minimum": 0},
                            "height": {"type": "number", "minimum": 0},
                        },
                        "required": ["type", "width", "height"],
                    },
                ],
                "discriminator": {"propertyName": "type"},
            }
        },
        "required": ["shape"],
    }
    Model = SchemaConverter.build(schema)
    # Valid: Circle
    circle = Model(shape={"type": "circle", "radius": 5.0})
    self.assertEqual(circle.shape.type, "circle")
    self.assertEqual(circle.shape.radius, 5.0)
    # Valid: Rectangle
    rectangle = Model(shape={"type": "rectangle", "width": 10, "height": 20})
    self.assertEqual(rectangle.shape.type, "rectangle")
    self.assertEqual(rectangle.shape.width, 10)
    self.assertEqual(rectangle.shape.height, 20)
    # Invalid: Wrong properties for the type
    with self.assertRaises(ValidationError):
        Model(shape={"type": "circle", "width": 10})
def test_oneof_with_examples(self):
    """Per-branch examples survive into the emitted JSON schema (as anyOf)."""
    schema = {
        "title": "ExampleTest",
        "type": "object",
        "properties": {
            "value": {
                "oneOf": [
                    {
                        "type": "string",
                        "examples": ["example1", "example2"],
                    },
                    {
                        "type": "integer",
                        "examples": [1, 2, 3],
                    },
                ]
            }
        },
        "required": ["value"],
    }
    Model = SchemaConverter.build(schema)
    # Since Pydantic does not natively support oneOf and the validation
    # is done via a custom validator, the `value` is represented using `anyOf`
    model_schema = Model.model_json_schema()
    self.assertEqual(
        model_schema["properties"]["value"]["anyOf"][0]["examples"],
        ["example1", "example2"],
    )
    self.assertEqual(
        model_schema["properties"]["value"]["anyOf"][1]["examples"],
        [1, 2, 3],
    )
def test_oneof_with_root_examples(self):
    """Examples attached next to oneOf (not inside a branch) are kept at field level."""
    schema = {
        "title": "ExampleTest",
        "type": "object",
        "properties": {
            "value": {
                "oneOf": [
                    {
                        "type": "string",
                    },
                    {
                        "type": "integer",
                    },
                ],
                "examples": ["example1", 2],
            }
        },
        "required": ["value"],
    }
    Model = SchemaConverter.build(schema)
    # Since Pydantic does not natively support oneOf and the validation
    # is done via a custom validator, the `value` is represented using `anyOf`
    model_schema = Model.model_json_schema()
    self.assertEqual(
        model_schema["properties"]["value"]["examples"],
        ["example1", 2],
    )

View File

@@ -0,0 +1,522 @@
from jambo.exceptions import InternalAssertionException, InvalidSchemaException
from jambo.parser import ObjectTypeParser, RefTypeParser
from pydantic import ValidationError
from typing_extensions import ForwardRef
from unittest import TestCase
class TestRefTypeParser(TestCase):
    """Tests for RefTypeParser: $ref resolution, forward references, and the ref cache.

    Covers required-argument enforcement (context, ref_cache), rejection of
    network and dangling refs, cyclic/self references via ForwardRef, and
    model identity when the same $def is resolved twice through a shared cache.
    """

    def test_ref_type_parser_throws_without_ref(self):
        """Properties without a $ref key are not valid input for RefTypeParser."""
        properties = {
            "title": "person",
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
            },
            "required": ["name", "age"],
        }
        with self.assertRaises(InvalidSchemaException):
            RefTypeParser().from_properties(
                "person",
                properties,
                context=properties,
                ref_cache={},
                required=True,
            )

    def test_ref_type_parser_throws_without_context(self):
        """Omitting the context keyword is an internal-assertion failure, not a schema error."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                    },
                }
            },
        }
        with self.assertRaises(InternalAssertionException):
            RefTypeParser().from_properties(
                "person",
                properties,
                ref_cache={},
                required=True,
            )

    def test_ref_type_parser_throws_without_ref_cache(self):
        """Omitting the ref_cache keyword is likewise an internal-assertion failure."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                    },
                }
            },
        }
        with self.assertRaises(InternalAssertionException):
            RefTypeParser().from_properties(
                "person",
                properties,
                context=properties,
                required=True,
            )

    def test_ref_type_parser_throws_if_network_ref_type(self):
        """Remote (network) $ref targets are not supported and must be rejected."""
        properties = {
            "title": "person",
            "$ref": "https://example.com/schemas/person.json",
        }
        with self.assertRaises(InvalidSchemaException):
            RefTypeParser().from_properties(
                "person",
                properties,
                context=properties,
                ref_cache={},
                required=True,
            )

    def test_ref_type_parser_throws_if_non_root_or_def_ref(self):
        """Only root ("#") and "#/$defs/..." refs are resolvable."""
        # This is invalid because object3 is referencing object2,
        # but object2 is not defined in $defs or as a root reference.
        properties = {
            "title": "object1",
            "type": "object",
            "properties": {
                "object2": {
                    "type": "object",
                    "properties": {
                        "attr1": {
                            "type": "string",
                        },
                        "attr2": {
                            "type": "integer",
                        },
                    },
                },
                "object3": {
                    "$ref": "#/$defs/object2",
                },
            },
        }
        with self.assertRaises(InvalidSchemaException):
            ObjectTypeParser().from_properties(
                "person",
                properties,
                context=properties,
                ref_cache={},
                required=True,
            )

    def test_ref_type_parser_throws_if_def_doesnt_exists(self):
        """A $ref pointing at a name missing from $defs is invalid."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/employee",
            "$defs": {},
        }
        with self.assertRaises(InvalidSchemaException):
            RefTypeParser().from_properties(
                "person",
                properties,
                context=properties,
                ref_cache={},
                required=True,
            )

    def test_ref_type_parser_throws_if_ref_property_doesnt_exists(self):
        """A $defs entry that is not a schema object (here: None) is invalid."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {"person": None},
        }
        with self.assertRaises(InvalidSchemaException):
            RefTypeParser().from_properties(
                "person",
                properties,
                context=properties,
                ref_cache={},
                required=True,
            )

    def test_ref_type_parser_with_def(self):
        """Resolving "#/$defs/person" yields a usable model class."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                    },
                }
            },
        }
        type_parsing, type_validator = RefTypeParser().from_properties(
            "person",
            properties,
            context=properties,
            ref_cache={},
            required=True,
        )
        self.assertIsInstance(type_parsing, type)
        obj = type_parsing(name="John", age=30)
        self.assertEqual(obj.name, "John")
        self.assertEqual(obj.age, 30)

    def test_ref_type_parser_with_forward_ref(self):
        """A root self-reference ("#") resolves so instances can nest their own type."""
        properties = {
            "title": "person",
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
                "emergency_contact": {
                    "$ref": "#",
                },
            },
            "required": ["name", "age"],
        }
        model, type_validator = ObjectTypeParser().from_properties(
            "person",
            properties,
            context=properties,
            ref_cache={},
            required=True,
        )
        obj = model(
            name="John",
            age=30,
            emergency_contact=model(
                name="Jane",
                age=28,
            ),
        )
        self.assertEqual(obj.name, "John")
        self.assertEqual(obj.age, 30)
        self.assertIsInstance(obj.emergency_contact, model)
        self.assertEqual(obj.emergency_contact.name, "Jane")
        self.assertEqual(obj.emergency_contact.age, 28)

    def test_ref_type_parser_invalid_forward_ref(self):
        """A root self-reference requires a title to name the forward-referenced model."""
        properties = {
            # Doesn't have a title, which is required for forward references
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
                "emergency_contact": {
                    "$ref": "#",
                },
            },
            "required": ["name", "age"],
        }
        with self.assertRaises(InvalidSchemaException):
            ObjectTypeParser().from_properties(
                "person",
                properties,
                context=properties,
                ref_cache={},
                required=True,
            )

    def test_ref_type_parser_forward_ref_can_checks_validation(self):
        """Nested instances created through a forward ref are still fully validated."""
        properties = {
            "title": "person",
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
                "emergency_contact": {
                    "$ref": "#",
                },
            },
            "required": ["name", "age"],
        }
        model, type_validator = ObjectTypeParser().from_properties(
            "person",
            properties,
            context=properties,
            ref_cache={},
            required=True,
        )
        # checks if when created via ForwardRef the model is validated correctly.
        with self.assertRaises(ValidationError):
            model(
                name="John",
                age=30,
                emergency_contact=model(
                    name="Jane",  # missing required "age"
                ),
            )

    def test_ref_type_parser_with_ciclic_def(self):
        """A $def that references itself builds a model supporting recursive nesting."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                        "emergency_contact": {
                            "$ref": "#/$defs/person",
                        },
                    },
                }
            },
        }
        model, type_validator = RefTypeParser().from_properties(
            "person",
            properties,
            context=properties,
            ref_cache={},
            required=True,
        )
        obj = model(
            name="John",
            age=30,
            emergency_contact=model(
                name="Jane",
                age=28,
            ),
        )
        self.assertEqual(obj.name, "John")
        self.assertEqual(obj.age, 30)
        self.assertIsInstance(obj.emergency_contact, model)
        self.assertEqual(obj.emergency_contact.name, "Jane")
        self.assertEqual(obj.emergency_contact.age, 28)

    def test_ref_type_parser_with_repeated_ref(self):
        """Two fields referencing the same $def must resolve to the same model class."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                        "emergency_contact": {
                            "$ref": "#/$defs/person",
                        },
                        "friends": {
                            "type": "array",
                            "items": {
                                "$ref": "#/$defs/person",
                            },
                        },
                    },
                }
            },
        }
        model, type_validator = RefTypeParser().from_properties(
            "person",
            properties,
            context=properties,
            ref_cache={},
            required=True,
        )
        obj = model(
            name="John",
            age=30,
            emergency_contact=model(
                name="Jane",
                age=28,
            ),
            friends=[
                model(name="Alice", age=25),
                model(name="Bob", age=26),
            ],
        )
        self.assertEqual(
            type(obj.emergency_contact),
            type(obj.friends[0]),
            "Emergency contact and friends should be of the same type",
        )

    def test_ref_type_parser_pre_computed_ref_cache(self):
        """A shared ref_cache makes repeated resolutions of one $def return one class."""
        ref_cache = {}
        parent_properties = {
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                    },
                }
            },
        }
        properties1 = {
            "title": "person1",
            "$ref": "#/$defs/person",
        }
        model1, _ = RefTypeParser().from_properties(
            "person",
            properties1,
            context=parent_properties,
            ref_cache=ref_cache,
            required=True,
        )
        properties2 = {
            "title": "person2",
            "$ref": "#/$defs/person",
        }
        model2, _ = RefTypeParser().from_properties(
            "person",
            properties2,
            context=parent_properties,
            ref_cache=ref_cache,  # same cache -> second lookup is a hit
            required=True,
        )
        self.assertIs(model1, model2, "Models should be the same instance")

    def test_parse_from_strategy_invalid_ref_strategy(self):
        """An unknown strategy name is rejected by _parse_from_strategy."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                    },
                }
            },
        }
        # No assignment: the call must raise, so its return value is unreachable.
        with self.assertRaises(InvalidSchemaException):
            RefTypeParser()._parse_from_strategy(
                "invalid_strategy",
                "person",
                properties,
            )

    def test_parse_from_strategy_forward_ref(self):
        """The "forward_ref" strategy returns a typing ForwardRef placeholder."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                    },
                }
            },
        }
        parsed_type = RefTypeParser()._parse_from_strategy(
            "forward_ref",
            "person",
            properties,
        )
        self.assertIsInstance(parsed_type, ForwardRef)

    def test_parse_from_strategy_def_ref(self):
        """The "def_ref" strategy resolves the $def into a concrete model class."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                    },
                }
            },
        }
        parsed_type = RefTypeParser()._parse_from_strategy(
            "def_ref",
            "person",
            properties,
            context=properties,
            ref_cache={},
            required=True,
        )
        obj = parsed_type(
            name="John",
            age=30,
        )
        self.assertEqual(obj.name, "John")
        self.assertEqual(obj.age, 30)

    def test_ref_type_parser_with_def_with_examples(self):
        """Examples attached beside the $ref survive into the field validator."""
        properties = {
            "title": "person",
            "$ref": "#/$defs/person",
            "$defs": {
                "person": {
                    "type": "object",
                    "properties": {
                        "name": {"type": "string"},
                        "age": {"type": "integer"},
                    },
                }
            },
            "examples": [
                {"name": "John", "age": 30},
                {"name": "Jane", "age": 25},
            ],
        }
        _, type_validator = RefTypeParser().from_properties(
            "person",
            properties,
            context=properties,
            ref_cache={},
            required=True,
        )
        self.assertEqual(
            type_validator.get("examples"),
            [
                {"name": "John", "age": 30},
                {"name": "Jane", "age": 25},
            ],
        )

View File

@@ -0,0 +1,323 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser import StringTypeParser
from pydantic import AnyUrl, EmailStr
import unittest
from datetime import date, datetime, time, timedelta, timezone
from ipaddress import IPv4Address, IPv6Address, ip_address
from unittest import TestCase
from uuid import UUID
class TestStringTypeParser(TestCase):
    """Tests for StringTypeParser: constraints, defaults, formats, and examples."""

    def test_string_parser_no_options(self):
        """A bare string schema maps to plain str with no validators of interest."""
        parser = StringTypeParser()
        properties = {"type": "string"}
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, str)

    def test_string_parser_with_options(self):
        """Length, pattern, and examples constraints are translated to validator keys."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "maxLength": 10,
            "minLength": 1,
            "pattern": "^[a-zA-Z]+$",
            "examples": ["test", "TEST"],
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, str)
        self.assertEqual(type_validator["max_length"], 10)
        self.assertEqual(type_validator["min_length"], 1)
        self.assertEqual(type_validator["pattern"], "^[a-zA-Z]+$")
        self.assertEqual(type_validator["examples"], ["test", "TEST"])

    def test_string_parser_with_default_value(self):
        """A default consistent with the constraints is carried through."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "default": "default_value",
            "maxLength": 20,
            "minLength": 5,
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, str)
        self.assertEqual(type_validator["default"], "default_value")
        self.assertEqual(type_validator["max_length"], 20)
        self.assertEqual(type_validator["min_length"], 5)

    def test_string_parser_with_invalid_default_value_type(self):
        """A non-string default on a string field is rejected."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "default": 12345,  # Invalid default value
            "maxLength": 20,
            "minLength": 5,
        }
        with self.assertRaises(InvalidSchemaException):
            parser.from_properties("placeholder", properties)

    def test_string_parser_with_default_invalid_maxlength(self):
        """A default longer than maxLength is rejected."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "default": "default_value",
            "maxLength": 2,
            "minLength": 1,
        }
        with self.assertRaises(InvalidSchemaException):
            parser.from_properties("placeholder", properties)

    def test_string_parser_with_default_invalid_minlength(self):
        """A default shorter than minLength is rejected."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "default": "a",
            "maxLength": 20,
            "minLength": 2,
        }
        with self.assertRaises(InvalidSchemaException):
            parser.from_properties("placeholder", properties)

    def test_string_parser_with_email_format(self):
        """format: email maps to pydantic's EmailStr."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "format": "email",
            "examples": ["test@example.com"],
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, EmailStr)
        self.assertEqual(type_validator["examples"], ["test@example.com"])

    def test_string_parser_with_uri_format(self):
        """format: uri maps to pydantic's AnyUrl."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "format": "uri",
            "examples": ["test://domain/resource"],
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, AnyUrl)
        self.assertEqual(type_validator["examples"], ["test://domain/resource"])

    def test_string_parser_with_ip_formats(self):
        """ipv4/ipv6 formats map to ipaddress types; examples are coerced."""
        parser = StringTypeParser()
        formats = {"ipv4": IPv4Address, "ipv6": IPv6Address}
        examples = {"ipv4": ["192.168.1.1"], "ipv6": ["::1"]}
        for ip_format, expected_type in formats.items():
            example = examples[ip_format]
            properties = {
                "type": "string",
                "format": ip_format,
                "examples": example,
            }
            type_parsing, type_validator = parser.from_properties(
                "placeholder", properties
            )
            self.assertEqual(type_parsing, expected_type)
            self.assertEqual(
                type_validator["examples"], [ip_address(e) for e in example]
            )

    def test_string_parser_with_uuid_format(self):
        """format: uuid maps to UUID; string examples become UUID objects."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "format": "uuid",
            "examples": ["ab71aaf4-ab6e-43cd-a369-cebdd9f7a4c6"],
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, UUID)
        self.assertEqual(
            type_validator["examples"], [UUID("ab71aaf4-ab6e-43cd-a369-cebdd9f7a4c6")]
        )

    def test_string_parser_with_time_format(self):
        """format: time maps to datetime.time, preserving fractions and offsets."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "format": "time",
            "examples": ["14:30:00", "09:15:30.500", "10:00:00+02:00"],
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, time)
        self.assertEqual(
            type_validator["examples"],
            [
                time(hour=14, minute=30, second=0),
                time(hour=9, minute=15, second=30, microsecond=500_000),
                time(hour=10, minute=0, second=0, tzinfo=timezone(timedelta(hours=2))),
            ],
        )

    def test_string_parser_with_pattern_based_formats(self):
        """Formats with no dedicated type fall back to a regex pattern on str."""
        parser = StringTypeParser()
        format_types = {
            "hostname": "example.com",
        }
        for format_type, example_type in format_types.items():
            properties = {
                "type": "string",
                "format": format_type,
                "examples": [example_type],
            }
            type_parsing, type_validator = parser.from_properties(
                "placeholder", properties
            )
            self.assertEqual(type_parsing, str)
            self.assertIn("pattern", type_validator)
            self.assertEqual(
                type_validator["pattern"], parser.format_pattern_mapping[format_type]
            )
            self.assertEqual(type_validator["examples"], [example_type])

    def test_string_parser_with_unsupported_format(self):
        """An unknown format raises with a message naming the offending field."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "format": "unsupported-format",
        }
        with self.assertRaises(InvalidSchemaException) as context:
            parser.from_properties("placeholder", properties)
        self.assertEqual(
            str(context.exception),
            "Invalid JSON Schema: Unsupported string format: unsupported-format (invalid field: format)",
        )

    def test_string_parser_with_date_format(self):
        """format: date maps to datetime.date; ISO example strings are parsed."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "format": "date",
            "examples": ["2025-11-17", "1999-12-31", "2000-01-01"],
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, date)
        self.assertEqual(
            type_validator["examples"],
            [
                date(year=2025, month=11, day=17),
                date(year=1999, month=12, day=31),
                date(year=2000, month=1, day=1),
            ],
        )

    def test_string_parser_with_datetime_format(self):
        """format: date-time maps to datetime, keeping microseconds and tz offsets."""
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "format": "date-time",
            "examples": [
                "2025-11-17T11:15:00",
                "2025-11-17T11:15:00+01:00",
                "2025-11-17T11:15:00.123456-05:00",
            ],
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, datetime)
        self.assertEqual(
            type_validator["examples"],
            [
                datetime(year=2025, month=11, day=17, hour=11, minute=15, second=0),
                datetime(
                    year=2025,
                    month=11,
                    day=17,
                    hour=11,
                    minute=15,
                    second=0,
                    tzinfo=timezone(timedelta(hours=1)),
                ),
                datetime(
                    year=2025,
                    month=11,
                    day=17,
                    hour=11,
                    minute=15,
                    second=0,
                    microsecond=123456,
                    tzinfo=timezone(timedelta(hours=-5)),
                ),
            ],
        )

    def test_string_parser_with_invalid_example_value(self):
        """An example that fails the format's own validation is a schema error."""
        with self.assertRaises(InvalidSchemaException):
            StringTypeParser().from_properties(
                "placeholder",
                {
                    "type": "string",
                    "format": "email",
                    "examples": ["invalid-email"],
                },
            )

    @unittest.skip("Duration parsing not yet implemented")
    def test_string_parser_with_timedelta_format(self):
        """format: duration should map to timedelta once implemented.

        NOTE(review): the expected list below is not in the same order as the
        `examples` input, and "P1Y2M3DT4H5M6S" (years/months) has no exact
        timedelta equivalent — revisit both when un-skipping this test.
        """
        parser = StringTypeParser()
        properties = {
            "type": "string",
            "format": "duration",
            "examples": ["P1Y2M3DT4H5M6S", "PT30M", "P7D", "PT0.5S"],
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing, timedelta)
        self.assertEqual(
            type_validator["examples"],
            [
                timedelta(days=7),
                timedelta(minutes=30),
                timedelta(hours=4, minutes=5, seconds=6),
                timedelta(seconds=0.5),
            ],
        )

View File

@@ -0,0 +1,33 @@
from jambo.exceptions import InvalidSchemaException
from jambo.parser import StringTypeParser
from jambo.parser._type_parser import GenericTypeParser
from unittest import TestCase
class TestGenericTypeParser(TestCase):
    """Tests for GenericTypeParser implementation lookup and shared example validation."""

    def test_get_impl(self):
        """_get_impl resolves {"type": "string"} to StringTypeParser."""
        parser = GenericTypeParser._get_impl({"type": "string"})
        self.assertIsInstance(parser(), StringTypeParser)

    def test_get_impl_invalid_json_schema(self):
        """_get_impl raises RuntimeError when a parser's json_schema_type is unset."""
        # Save and restore the class attribute in a finally block: the original
        # restored it with a hard-coded value after the `with`, so a failed
        # assertion would leak json_schema_type=None into every later test.
        original = StringTypeParser.json_schema_type
        try:
            StringTypeParser.json_schema_type = None
            with self.assertRaises(RuntimeError):
                GenericTypeParser._get_impl({"type": "string"})
        finally:
            StringTypeParser.json_schema_type = original

    def test_get_impl_invalid_type(self):
        """An unrecognized "type" value is an invalid schema."""
        with self.assertRaises(InvalidSchemaException):
            GenericTypeParser._get_impl({"type": "invalid_type"})

    def test_invalid_examples_not_list(self):
        """`examples` must be a list; a bare string is rejected by the shared base logic."""
        parser = StringTypeParser()
        properties = {
            "type": "integer",  # parser class is irrelevant here; the examples check fires first
            "examples": "this should be a list",
        }
        with self.assertRaises(InvalidSchemaException):
            parser.from_properties("placeholder", properties)

File diff suppressed because it is too large Load Diff

View File

@@ -1,139 +0,0 @@
from jambo.parser import (
ArrayTypeParser,
FloatTypeParser,
GenericTypeParser,
IntTypeParser,
ObjectTypeParser,
StringTypeParser,
)
import unittest
from typing import get_args
class TestTypeParser(unittest.TestCase):
    """Legacy tests for the parser registry and the individual type parsers.

    NOTE(review): this file appears in the diff as deleted (its contents were
    superseded by the per-parser test modules above); it exercises the older
    `get_impl(type_name)` lookup API.
    """

    def test_get_impl(self):
        """get_impl maps each JSON-Schema type name to its parser class."""
        self.assertEqual(GenericTypeParser.get_impl("integer"), IntTypeParser)
        self.assertEqual(GenericTypeParser.get_impl("string"), StringTypeParser)
        self.assertEqual(GenericTypeParser.get_impl("number"), FloatTypeParser)
        self.assertEqual(GenericTypeParser.get_impl("object"), ObjectTypeParser)
        self.assertEqual(GenericTypeParser.get_impl("array"), ArrayTypeParser)

    def test_int_parser(self):
        """Integer bounds and multipleOf translate to ge/gt/le/lt/multiple_of."""
        parser = IntTypeParser()
        type_parsing, type_validator = parser.from_properties(
            "placeholder",
            {
                "type": "integer",
                "minimum": 0,
                "exclusiveMinimum": 1,
                "maximum": 10,
                "exclusiveMaximum": 11,
                "multipleOf": 2,
            },
        )
        self.assertEqual(type_parsing, int)
        self.assertEqual(type_validator["ge"], 0)
        self.assertEqual(type_validator["gt"], 1)
        self.assertEqual(type_validator["le"], 10)
        self.assertEqual(type_validator["lt"], 11)
        self.assertEqual(type_validator["multiple_of"], 2)

    def test_float_parser(self):
        """Number bounds translate the same way as integers, with float type."""
        parser = FloatTypeParser()
        type_parsing, type_validator = parser.from_properties(
            "placeholder",
            {
                "type": "number",
                "minimum": 0,
                "exclusiveMinimum": 1,
                "maximum": 10,
                "exclusiveMaximum": 11,
                "multipleOf": 2,
            },
        )
        self.assertEqual(type_parsing, float)
        self.assertEqual(type_validator["ge"], 0)
        self.assertEqual(type_validator["gt"], 1)
        self.assertEqual(type_validator["le"], 10)
        self.assertEqual(type_validator["lt"], 11)
        self.assertEqual(type_validator["multiple_of"], 2)

    def test_string_parser(self):
        """String length/pattern constraints map to max_length/min_length/pattern."""
        parser = StringTypeParser()
        type_parsing, type_validator = parser.from_properties(
            "placeholder",
            {
                "type": "string",
                "maxLength": 10,
                "minLength": 1,
                "pattern": "[a-zA-Z0-9]",
            },
        )
        self.assertEqual(type_parsing, str)
        self.assertEqual(type_validator["max_length"], 10)
        self.assertEqual(type_validator["min_length"], 1)
        self.assertEqual(type_validator["pattern"], "[a-zA-Z0-9]")

    def test_object_parser(self):
        """An object schema produces a model class whose fields are settable."""
        parser = ObjectTypeParser()
        properties = {
            "type": "object",
            "properties": {
                "name": {"type": "string"},
                "age": {"type": "integer"},
            },
        }
        Model, _args = parser.from_properties("placeholder", properties)
        obj = Model(name="name", age=10)
        self.assertEqual(obj.name, "name")
        self.assertEqual(obj.age, 10)

    def test_array_of_string_parser(self):
        """A string-item array maps to list[str] with no extra validators."""
        parser = ArrayTypeParser()
        expected_definition = (list[str], {})
        properties = {"items": {"type": "string"}}
        self.assertEqual(
            parser.from_properties("placeholder", properties), expected_definition
        )

    def test_array_of_object_parser(self):
        """uniqueItems=True switches the container to set; item models still validate."""
        parser = ArrayTypeParser()
        properties = {
            "type": "array",
            "items": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"},
                    "age": {"type": "integer"},
                },
            },
            "maxItems": 10,
            "minItems": 1,
            "uniqueItems": True,
        }
        type_parsing, type_validator = parser.from_properties("placeholder", properties)
        self.assertEqual(type_parsing.__origin__, set)
        self.assertEqual(type_validator["max_length"], 10)
        self.assertEqual(type_validator["min_length"], 1)
        # The generated item model is the single type argument of set[...]
        Model = get_args(type_parsing)[0]
        obj = Model(name="name", age=10)
        self.assertEqual(obj.name, "name")
        self.assertEqual(obj.age, 10)

1562
uv.lock generated

File diff suppressed because it is too large Load Diff