Broken schema generation

parent 8f32b359f0
commit 5d05de95f8

5 changed files with 234 additions and 58 deletions
poetry.lock (generated, 129 changed lines)
@@ -21,6 +21,19 @@ category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "astroid"
version = "2.8.0"
description = "An abstract syntax tree for Python with inference support."
category = "main"
optional = false
python-versions = "~=3.6"

[package.dependencies]
lazy-object-proxy = ">=1.4.0"
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
wrapt = ">=1.11,<1.13"

[[package]]
name = "async-timeout"
version = "3.0.1"
@@ -139,6 +152,20 @@ parallel = ["ipyparallel"]
qtconsole = ["qtconsole"]
test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"]

[[package]]
name = "isort"
version = "5.9.3"
description = "A Python utility / library to sort Python imports."
category = "main"
optional = false
python-versions = ">=3.6.1,<4.0"

[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
plugins = ["setuptools"]

[[package]]
name = "jedi"
version = "0.18.0"
@@ -154,6 +181,14 @@ parso = ">=0.8.0,<0.9.0"
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"]

[[package]]
name = "lazy-object-proxy"
version = "1.6.0"
description = "A fast and thorough lazy object proxy."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"

[[package]]
name = "matplotlib-inline"
version = "0.1.3"
@@ -165,6 +200,14 @@ python-versions = ">=3.5"
[package.dependencies]
traitlets = "*"

[[package]]
name = "mccabe"
version = "0.6.1"
description = "McCabe checker, plugin for flake8"
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "mypy"
version = "0.910"
@@ -232,6 +275,18 @@ category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "platformdirs"
version = "2.4.0"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "main"
optional = false
python-versions = ">=3.6"

[package.extras]
docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"]
test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]

[[package]]
name = "pluggy"
version = "1.0.0"
@@ -302,6 +357,23 @@ category = "dev"
optional = false
python-versions = ">=3.5"

[[package]]
name = "pylint"
version = "2.11.1"
description = "python code static checker"
category = "main"
optional = false
python-versions = "~=3.6"

[package.dependencies]
astroid = ">=2.8.0,<2.9"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.7"
platformdirs = ">=2.2.0"
toml = ">=0.7.1"
typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}

[[package]]
name = "pyparsing"
version = "2.4.7"
@@ -409,10 +481,18 @@ category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "wrapt"
version = "1.12.1"
description = "Module for decorators, wrappers and monkey patching."
category = "main"
optional = false
python-versions = "*"

[metadata]
lock-version = "1.1"
python-versions = "^3.8"
content-hash = "baa4bd3c38445c3325bdd317ecbfe99ccaf4bef438970ed31f5c49cc782d575e"
content-hash = "e643c8bcc3f54c414e388a8c62256c3c0fe9e2fb0374c3f3b4140e2b0684b654"

[metadata.files]
aioredis = [
@@ -423,6 +503,10 @@ appnope = [
{file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"},
{file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"},
]
astroid = [
{file = "astroid-2.8.0-py3-none-any.whl", hash = "sha256:dcc06f6165f415220013801642bd6c9808a02967070919c4b746c6864c205471"},
{file = "astroid-2.8.0.tar.gz", hash = "sha256:fe81f80c0b35264acb5653302ffbd935d394f1775c5e4487df745bf9c2442708"},
]
async-timeout = [
{file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"},
{file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"},
@@ -462,14 +546,46 @@ ipython = [
{file = "ipython-7.27.0-py3-none-any.whl", hash = "sha256:75b5e060a3417cf64f138e0bb78e58512742c57dc29db5a5058a2b1f0c10df02"},
{file = "ipython-7.27.0.tar.gz", hash = "sha256:58b55ebfdfa260dad10d509702dc2857cb25ad82609506b070cf2d7b7df5af13"},
]
isort = [
{file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"},
{file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"},
]
jedi = [
{file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"},
{file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"},
]
lazy-object-proxy = [
{file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"},
{file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"},
{file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"},
{file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"},
{file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"},
{file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"},
{file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"},
{file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"},
{file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"},
{file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"},
{file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"},
{file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"},
{file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"},
{file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"},
]
matplotlib-inline = [
{file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"},
{file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
mypy = [
{file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"},
{file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"},
@@ -515,6 +631,10 @@ pickleshare = [
{file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"},
{file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
]
platformdirs = [
{file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"},
{file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
@@ -562,6 +682,10 @@ pygments = [
{file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"},
{file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"},
]
pylint = [
{file = "pylint-2.11.1-py3-none-any.whl", hash = "sha256:0f358e221c45cbd4dad2a1e4b883e75d28acdcccd29d40c76eb72b307269b126"},
{file = "pylint-2.11.1.tar.gz", hash = "sha256:2c9843fff1a88ca0ad98a256806c82c5a8f86086e7ccbdb93297d86c3f90c436"},
]
pyparsing = [
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
@@ -607,3 +731,6 @@ wcwidth = [
{file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
]
wrapt = [
{file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
]
pyproject.toml

@@ -17,6 +17,7 @@ mypy = "^0.910"
types-redis = "^3.5.9"
types-six = "^1.16.1"
python-ulid = "^1.0.3"
pylint = "^2.11.1"

[tool.poetry.dev-dependencies]
pytest = "^6.2.4"
redis_developer/orm/model.py

@@ -94,6 +94,14 @@ class Operators(Enum):
ExpressionOrModelField = Union['Expression', 'NegatedExpression', ModelField]


def embedded(cls):
"""
Mark a model as embedded to avoid creating multiple indexes if the model is
only ever used embedded within other models.
"""
setattr(cls.Meta, 'embedded', True)


class ExpressionProtocol(Protocol):
op: Operators
left: ExpressionOrModelField
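For orientation, a minimal sketch of how the new embedded() helper can be applied, modeled on the EmbeddedJsonModel pattern in the tests at the end of this diff. The Note model and its field are illustrative, not part of the commit:

```python
from redis_developer.orm import JsonModel, Field
from redis_developer.orm.model import embedded


class Note(JsonModel):
    description: str = Field(index=True)


# Marks the model so the Migrator skips creating a standalone index for it;
# equivalent to declaring `class Meta: embedded = True` on the model, as the
# test models further down do.
embedded(Note)
```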
@@ -166,15 +174,16 @@ class Expression:
op: Operators
left: ExpressionOrModelField
right: ExpressionOrModelField
parents: List[Tuple[str, 'RedisModel']]

def __invert__(self):
return NegatedExpression(self)

def __and__(self, other: ExpressionOrModelField):
return Expression(left=self, op=Operators.AND, right=other)
return Expression(left=self, op=Operators.AND, right=other, parents=self.parents)

def __or__(self, other: ExpressionOrModelField):
return Expression(left=self, op=Operators.OR, right=other)
return Expression(left=self, op=Operators.OR, right=other, parents=self.parents)

@property
def name(self):

@@ -189,26 +198,34 @@ ExpressionOrNegated = Union[Expression, NegatedExpression]

class ExpressionProxy:
def __init__(self, field: ModelField):
def __init__(self, field: ModelField, parents: List[Tuple[str, 'RedisModel']]):
self.field = field
self.parents = parents

def __eq__(self, other: Any) -> Expression: # type: ignore[override]
return Expression(left=self.field, op=Operators.EQ, right=other)
return Expression(left=self.field, op=Operators.EQ, right=other, parents=self.parents)

def __ne__(self, other: Any) -> Expression: # type: ignore[override]
return Expression(left=self.field, op=Operators.NE, right=other)
return Expression(left=self.field, op=Operators.NE, right=other, parents=self.parents)

def __lt__(self, other: Any) -> Expression: # type: ignore[override]
return Expression(left=self.field, op=Operators.LT, right=other)
return Expression(left=self.field, op=Operators.LT, right=other, parents=self.parents)

def __le__(self, other: Any) -> Expression: # type: ignore[override]
return Expression(left=self.field, op=Operators.LE, right=other)
return Expression(left=self.field, op=Operators.LE, right=other, parents=self.parents)

def __gt__(self, other: Any) -> Expression: # type: ignore[override]
return Expression(left=self.field, op=Operators.GT, right=other)
return Expression(left=self.field, op=Operators.GT, right=other, parents=self.parents)

def __ge__(self, other: Any) -> Expression: # type: ignore[override]
return Expression(left=self.field, op=Operators.GE, right=other)
return Expression(left=self.field, op=Operators.GE, right=other, parents=self.parents)

def __getattr__(self, item):
attr = getattr(self.field.outer_type_, item)
if isinstance(attr, self.__class__):
attr.parents.insert(0, (self.field.name, self.field.outer_type_))
attr.parents = attr.parents + self.parents
return attr

class QueryNotSupportedError(Exception):
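The parents list threaded through Expression and ExpressionProxy above is what lets a query reach into embedded models. A small sketch of the behaviour, assuming the Member and Address models defined in tests/test_json_model.py later in this diff:

```python
# Assumes the Member and Address models from tests/test_json_model.py below.
from tests.test_json_model import Member, Address
from redis_developer.orm.model import Operators

# Member.address is an ExpressionProxy created with parents=[]; attribute access
# goes through __getattr__, which prepends ("address", Address) to the nested
# proxy's parents, so the Expression built by == still knows the path from the
# root model down to the compared field.
expression = Member.address.city == "Portland"
assert expression.op is Operators.EQ
assert expression.parents == [("address", Address)]
```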
@@ -265,7 +282,10 @@ class FindQuery:
if self.expressions:
self._expression = reduce(operator.and_, self.expressions)
else:
self._expression = Expression(left=None, right=None, op=Operators.ALL)
# TODO: Is there a better way to support the "give me all records" query?
# Also -- if we do it this way, we need different type annotations.
self._expression = Expression(left=None, right=None, op=Operators.ALL,
parents=[])
return self._expression

@property
@@ -316,7 +336,11 @@ class FindQuery:

@classmethod
def resolve_value(cls, field_name: str, field_type: RediSearchFieldTypes,
field_info: PydanticFieldInfo, op: Operators, value: Any) -> str:
field_info: PydanticFieldInfo, op: Operators, value: Any,
parents: List[Tuple[str, 'RedisModel']]) -> str:
if parents:
prefix = "_".join([p[0] for p in parents])
field_name = f"{prefix}_{field_name}"
result = ""
if field_type is RediSearchFieldTypes.TEXT:
result = f"@{field_name}:"
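A worked example of the prefix logic just added to resolve_value(); the values are illustrative and mirror the nested address/note models used in the tests below:

```python
# The parents list carries (attribute name, model class) tuples; only the
# attribute names matter when building the flattened RediSearch field name,
# so stand-in strings are used for the classes in this sketch.
parents = [("address", "Address"), ("note", "Note")]
prefix = "_".join([p[0] for p in parents])   # "address_note"
field_name = f"{prefix}_description"         # "address_note_description"
print(f"@{field_name}:")                     # start of a TEXT query clause
```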
@@ -427,6 +451,9 @@ class FindQuery:
field_type = cls.resolve_field_type(expression.left)
field_name = expression.left.name
field_info = expression.left.field_info
if not field_info or not getattr(field_info, "index", None):
raise QueryNotSupportedError(f"You tried to query by a field ({field_name}) "
f"that isn't indexed. See docs: TODO")
else:
raise QueryNotSupportedError(f"A query expression should start with either a field "
f"or an expression enclosed in parenthesis. See docs: "

@@ -454,7 +481,8 @@ class FindQuery:
if isinstance(right, ModelField):
raise QueryNotSupportedError("Comparing fields is not supported. See docs: TODO")
else:
result += cls.resolve_value(field_name, field_type, field_info, expression.op, right)
result += cls.resolve_value(field_name, field_type, field_info,
expression.op, right, expression.parents)

if encompassing_expression_is_negated:
result = f"-({result})"
@@ -705,6 +733,7 @@ class MetaProtocol(Protocol):
primary_key_creator_cls: Type[PrimaryKeyCreator]
index_name: str
abstract: bool
embedded: bool

@dataclasses.dataclass

@@ -722,6 +751,7 @@ class DefaultMeta:
primary_key_creator_cls: Optional[Type[PrimaryKeyCreator]] = None
index_name: Optional[str] = None
abstract: Optional[bool] = False
embedded: Optional[bool] = False

class ModelMeta(ModelMetaclass):
@@ -730,6 +760,11 @@ class ModelMeta(ModelMetaclass):
def __new__(cls, name, bases, attrs, **kwargs): # noqa C901
meta = attrs.pop('Meta', None)
new_class = super().__new__(cls, name, bases, attrs, **kwargs)

# The fact that there is a Meta field and _meta field is important: a
# user may have given us a Meta object with their configuration, while
# we might have inherited _meta from a parent class, and should
# therefore use some of the inherited fields.
meta = meta or getattr(new_class, 'Meta', None)
base_meta = getattr(new_class, '_meta', None)

@@ -739,8 +774,9 @@ class ModelMeta(ModelMetaclass):
elif base_meta:
new_class._meta = deepcopy(base_meta)
new_class.Meta = new_class._meta
# Unset inherited values we don't want to reuse (typically based on the model name).
new_class._meta.abstract = False
# Unset inherited values we don't want to reuse (typically based on
# the model name).
new_class._meta.embedded = False
new_class._meta.model_key_prefix = None
new_class._meta.index_name = None
else:
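Roughly how the Meta/_meta split described in the comment above plays out when a child model defines no Meta of its own; a hedged sketch, with model names borrowed from the tests below:

```python
import abc
from redis_developer.orm import JsonModel, Field


class BaseJsonModel(JsonModel, abc.ABC):
    class Meta:
        global_key_prefix = "redis-developer"   # user-supplied configuration


class Member(BaseJsonModel):                    # defines no Meta of its own
    first_name: str = Field(index=True)


# ModelMeta deep-copies the inherited _meta, so Member keeps the user's
# global_key_prefix, while name-derived values (model_key_prefix, index_name)
# and the abstract/embedded flags are reset and recomputed for Member itself.
assert Member._meta.global_key_prefix == "redis-developer"
```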
@@ -750,7 +786,7 @@ class ModelMeta(ModelMetaclass):
# Create proxies for each model field so that we can use the field
# in queries, like Model.get(Model.field_name == 1)
for field_name, field in new_class.__fields__.items():
setattr(new_class, field_name, ExpressionProxy(field))
setattr(new_class, field_name, ExpressionProxy(field, []))
# Check if this is our FieldInfo version with extended ORM metadata.
if isinstance(field.field_info, FieldInfo):
if field.field_info.primary_key:

@@ -774,8 +810,9 @@ class ModelMeta(ModelMetaclass):
new_class._meta.index_name = f"{new_class._meta.global_key_prefix}:" \
f"{new_class._meta.model_key_prefix}:index"

# Not an abstract model class
if abc.ABC not in bases:
# Not an abstract model class or embedded model, so we should let the
# Migrator create indexes for it.
if abc.ABC not in bases and not new_class._meta.embedded:
key = f"{new_class.__module__}.{new_class.__qualname__}"
model_registry[key] = new_class
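A sketch of the registration rule the change above tightens: only concrete, non-embedded models end up in model_registry, so the Migrator builds indexes just for those. The class names here are hypothetical:

```python
import abc
from redis_developer.orm import JsonModel


class AbstractBase(JsonModel, abc.ABC):   # abc.ABC in bases -> not registered
    pass


class EmbeddedOnly(JsonModel):            # Meta.embedded = True -> not registered,
    class Meta:                           # so no standalone index is created
        embedded = True


class Concrete(JsonModel):                # registered under f"{module}.{qualname}";
    name: str                             # the Migrator creates an index for it
```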
@@ -967,7 +1004,7 @@ class HashModel(RedisModel, abc.ABC):
schema_parts = []

for name, field in cls.__fields__.items():
# TODO: Merge this code with schema_for_type()
# TODO: Merge this code with schema_for_type()?
_type = field.outer_type_
if getattr(field.field_info, 'primary_key', None):
if issubclass(_type, str):

@@ -1047,6 +1084,8 @@ class JsonModel(RedisModel, abc.ABC):
schema_parts = []
json_path = "$"

if cls.__name__ == "Address":
import ipdb; ipdb.set_trace()
for name, field in cls.__fields__.items():
# TODO: Merge this code with schema_for_type()?
_type = field.outer_type_
@@ -1070,21 +1109,20 @@ class JsonModel(RedisModel, abc.ABC):
log.warning("Model %s defined an empty list field: %s", cls, name)
continue
embedded_cls = embedded_cls[0]
schema_parts.append(cls.schema_for_type(f"{json_path}.{name}[]", name, f"{name}",
# TODO: Should this have a name prefix?
schema_parts.append(cls.schema_for_type(f"{json_path}.{name}[]", name, name,
embedded_cls, field.field_info))
elif issubclass(_type, RedisModel):
schema_parts.append(cls.schema_for_type(f"{json_path}.{name}", name, f"{name}", _type,
schema_parts.append(cls.schema_for_type(f"{json_path}.{name}", name, name, _type,
field.field_info))
return schema_parts

@classmethod
# TODO: We need both the "name" of the field (address_line_1) as we'll
# find it in the JSON document, AND the name of the field as it should
# be in the redisearch schema (address_address_line_1). Maybe both "name"
# and "name_prefix"?
def schema_for_type(cls, json_path: str, name: str, name_prefix: str, typ: Any,
field_info: PydanticFieldInfo) -> str:
index_field_name = f"{name_prefix}{name}"
if name == "description":
import ipdb; ipdb.set_trace()
index_field_name = f"{name_prefix}_{name}"
should_index = getattr(field_info, 'index', False)

if get_origin(typ) == list:
@@ -1094,15 +1132,14 @@ class JsonModel(RedisModel, abc.ABC):
log.warning("Model %s defined an empty list field: %s", cls, name)
return ""
embedded_cls = embedded_cls[0]
# TODO: We need to pass the "JSON Path so far" which should include the
# correct syntax for an array.
return cls.schema_for_type(f"{json_path}[]", name, f"{name_prefix}{name}", embedded_cls, field_info)
return cls.schema_for_type(f"{json_path}[]", name, f"{name_prefix}{name}",
embedded_cls, field_info)
elif issubclass(typ, RedisModel):
sub_fields = []
for embedded_name, field in typ.__fields__.items():
sub_fields.append(cls.schema_for_type(f"{json_path}.{embedded_name}",
embedded_name,
f"{name_prefix}_",
f"{name_prefix}_{embedded_name}",
field.outer_type_,
field.field_info))
return " ".join(filter(None, sub_fields))
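The recursion above is what flattens nested JSON paths into index aliases. A rough mapping for the models used in the JSON tests, taken from the redisearch_schema assertion at the end of this diff; the doubled underscore on the note fields appears to be the broken naming this work-in-progress commit is chasing:

```python
# JSON path in the document      ->  alias asserted in the RediSearch schema below
expected_aliases = {
    "$.address.pk":                 "address_pk",
    "$.address.postal_code":        "address_postal_code",
    "$.address.note.description":   "address__description",   # "note" segment lost
    "$.orders[].items[].name":      "orders_items_name",
}
```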
tests/test_hash_model.py

@@ -397,7 +397,4 @@ def test_schema():
another_integer: int
another_float: float

assert Address.redisearch_schema() == "ON HASH PREFIX 1 redis-developer:tests.test_hash_model.Address: " \
"SCHEMA pk TAG a_string TAG a_full_text_string TAG " \
"a_full_text_string_fts TEXT an_integer NUMERIC SORTABLE " \
"a_float NUMERIC"
assert Address.redisearch_schema() == "ON HASH PREFIX 1 redis-developer:tests.test_hash_model.Address: SCHEMA pk TAG SEPARATOR | a_string TAG SEPARATOR | a_full_text_string TAG SEPARATOR | a_full_text_string_fts TEXT an_integer NUMERIC SORTABLE a_float NUMERIC"
tests/test_json_model.py

@@ -12,7 +12,7 @@ from redis_developer.orm import (
JsonModel,
Field,
)
from redis_developer.orm.model import RedisModelError, QueryNotSupportedError, NotFoundError
from redis_developer.orm.model import RedisModelError, QueryNotSupportedError, NotFoundError, embedded

r = redis.Redis()
today = datetime.date.today()
@@ -23,21 +23,32 @@ class BaseJsonModel(JsonModel, abc.ABC):
global_key_prefix = "redis-developer"


class Address(BaseJsonModel):
class EmbeddedJsonModel(BaseJsonModel, abc.ABC):
class Meta:
embedded = True


class Note(EmbeddedJsonModel):
description: str = Field(index=True)
created_on: datetime.datetime


class Address(EmbeddedJsonModel):
address_line_1: str
address_line_2: Optional[str]
city: str
city: str = Field(index=True)
state: str
country: str
postal_code: str = Field(index=True)
note: Optional[Note]


class Item(BaseJsonModel):
class Item(EmbeddedJsonModel):
price: decimal.Decimal
name: str = Field(index=True, full_text_search=True)


class Order(BaseJsonModel):
class Order(EmbeddedJsonModel):
items: List[Item]
total: decimal.Decimal
created_on: datetime.datetime
@@ -234,29 +245,44 @@ def test_exact_match_queries(members):

actual = Member.find(Member.last_name == "Brookins").all()
assert actual == [member1, member2]

actual = Member.find(
(Member.last_name == "Brookins") & ~(Member.first_name == "Andrew")).all()
assert actual == [member2]

actual = Member.find(~(Member.last_name == "Brookins")).all()
assert actual == [member3]

actual = Member.find(Member.last_name != "Brookins").all()
assert actual == [member3]

actual = Member.find(
(Member.last_name == "Brookins") & (Member.first_name == "Andrew")
| (Member.first_name == "Kim")
).all()
assert actual == [member2, member1]
assert actual == [member1, member2]

actual = Member.find(Member.first_name == "Kim", Member.last_name == "Brookins").all()
assert actual == [member2]

actual = Member.find(Member.address.city == "Portland").all()
assert actual == [member1, member2, member3]

member1.address.note = Note(description="Weird house",
created_on=datetime.datetime.now())
member1.save()
actual = Member.find(Member.address.note.description == "Weird house").all()
assert actual == [member1]

member1.orders = [
Order(items=[Item(price=10.99, name="Ball")],
total=10.99,
created_on=datetime.datetime.now())
]
member1.save()
actual = Member.find(Member.orders.items.name == "Ball").all()
assert actual == [member1]


def test_recursive_query_resolution(members):
member1, member2, member3 = members
@@ -425,16 +451,4 @@ def test_not_found():


def test_schema():
assert Member.redisearch_schema() == "ON JSON PREFIX 1 " \
"redis-developer:tests.test_json_model.Member: " \
"SCHEMA $.pk AS pk TAG " \
"$.first_name AS first_name TAG " \
"$.last_name AS last_name TAG " \
"$.email AS email TAG " \
"$.age AS age NUMERIC " \
"$.address.pk AS address_pk TAG " \
"$.address.postal_code AS address_postal_code TAG " \
"$.orders[].pk AS orders_pk TAG " \
"$.orders[].items[].pk AS orders_items_pk TAG " \
"$.orders[].items[].name AS orders_items_name TAG " \
"$.orders[].items[].name AS orders_items_name_fts TEXT"
assert Member.redisearch_schema() == "ON JSON PREFIX 1 redis-developer:tests.test_json_model.Member: SCHEMA $.pk AS pk TAG SEPARATOR | $.first_name AS first_name TAG SEPARATOR | $.last_name AS last_name TAG SEPARATOR | $.email AS email TAG SEPARATOR | $.age AS age NUMERIC $.address.pk AS address_pk TAG SEPARATOR | $.address.postal_code AS address_postal_code TAG SEPARATOR | $.address.note.pk AS address__pk TAG SEPARATOR | $.address.note.description AS address__description TAG SEPARATOR | $.orders[].pk AS orders_pk TAG SEPARATOR | $.orders[].items[].pk AS orders_items_pk TAG SEPARATOR | $.orders[].items[].name AS orders_items_name TAG SEPARATOR | $.orders[].items[].name AS orders_items_name_fts TEXT"