Add Makefile, black, reformat with black

This commit is contained in:
Andrew Brookins 2021-10-20 13:01:46 -07:00
parent cfc50b82bb
commit d2fa4c586f
16 changed files with 978 additions and 366 deletions

0
.install.stamp Normal file
View file

55
Makefile Normal file
View file

@@ -0,0 +1,55 @@
# Developer task runner for the redis_developer package.
# All Python tooling is invoked through Poetry so the project virtualenv is used.
NAME := redis_developer
INSTALL_STAMP := .install.stamp
# Resolve the poetry binary once at parse time; empty if not installed.
POETRY := $(shell command -v poetry 2> /dev/null)

.DEFAULT_GOAL := help

.PHONY: help
help:  ## Describe the available targets
	@echo "Please use 'make <target>' where <target> is one of"
	@echo ""
	@echo "  install     install packages and prepare environment"
	@echo "  clean       remove all temporary files"
	@echo "  lint        run the code linters"
	@echo "  format      reformat code"
	@echo "  test        run all the tests"
	@echo "  shell       open a Poetry shell"
	@echo ""
	@echo "Check the Makefile to know exactly what each target is doing."

# `install` is an alias for the stamp file; the stamp records that
# `poetry install` has run since pyproject.toml/poetry.lock last changed.
.PHONY: install
install: $(INSTALL_STAMP)
$(INSTALL_STAMP): pyproject.toml poetry.lock
	@if [ -z "$(POETRY)" ]; then echo "Poetry could not be found. See https://python-poetry.org/docs/"; exit 2; fi
	$(POETRY) install
	touch $(INSTALL_STAMP)

.PHONY: clean
clean:  ## Remove caches and the install stamp (forces reinstall next run)
	find . -type d -name "__pycache__" -exec rm -rf {} +
	rm -rf $(INSTALL_STAMP) .coverage .mypy_cache

.PHONY: lint
lint: $(INSTALL_STAMP)  ## Run all static checks (isort, black, flake8, mypy, bandit)
	$(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME)
	$(POETRY) run black ./tests/ $(NAME)
	$(POETRY) run flake8 --ignore=W503,E501,F401,E731 ./tests/ $(NAME)
	$(POETRY) run mypy ./tests/ $(NAME) --ignore-missing-imports
	$(POETRY) run bandit -r $(NAME) -s B608

.PHONY: format
format: $(INSTALL_STAMP)  ## Rewrite code in place with isort + black
	$(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME)
	$(POETRY) run black ./tests/ $(NAME)

.PHONY: test
test: $(INSTALL_STAMP)  ## Run the test suite (coverage gate currently disabled)
	#$(POETRY) run pytest ./tests/ --cov-report term-missing --cov-fail-under 100 --cov $(NAME)
	$(POETRY) run pytest ./tests/

.PHONY: shell
shell: $(INSTALL_STAMP)  ## Open an interactive shell inside the Poetry virtualenv
	$(POETRY) shell

.PHONY: redis
redis:  ## Start the Redis service defined in docker-compose.yml
	docker-compose up -d

11
docker-compose.yml Normal file
View file

@@ -0,0 +1,11 @@
# Local Redis for development: redismod image with the RedisJSON module loaded.
version: "3.8"

services:
  redis:
    image: "redislabs/redismod:edge"
    # Override the image entrypoint to load only RedisJSON and enable
    # append-only-file persistence.
    entrypoint: ["redis-server", "--appendonly", "yes", "--loadmodule", "/usr/lib/redis/modules/rejson.so"]
    restart: always
    ports:
      # Host 6380 -> container 6379, avoiding a clash with any locally
      # installed Redis on the default port.
      - "6380:6379"
    volumes:
      # Persist the AOF data between container restarts.
      - ./data:/data

388
poetry.lock generated
View file

@ -23,16 +23,16 @@ python-versions = "*"
[[package]] [[package]]
name = "astroid" name = "astroid"
version = "2.8.0" version = "2.8.3"
description = "An abstract syntax tree for Python with inference support." description = "An abstract syntax tree for Python with inference support."
category = "main" category = "dev"
optional = false optional = false
python-versions = "~=3.6" python-versions = "~=3.6"
[package.dependencies] [package.dependencies]
lazy-object-proxy = ">=1.4.0" lazy-object-proxy = ">=1.4.0"
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
wrapt = ">=1.11,<1.13" wrapt = ">=1.11,<1.14"
[[package]] [[package]]
name = "async-timeout" name = "async-timeout"
@ -72,9 +72,51 @@ category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "*"
[[package]]
name = "bandit"
version = "1.7.0"
description = "Security oriented static analyser for python code."
category = "dev"
optional = false
python-versions = ">=3.5"
[package.dependencies]
colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
GitPython = ">=1.0.1"
PyYAML = ">=5.3.1"
six = ">=1.10.0"
stevedore = ">=1.20.0"
[[package]]
name = "black"
version = "21.9b0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.6.2"
[package.dependencies]
click = ">=7.1.2"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0,<1"
platformdirs = ">=2"
regex = ">=2020.1.8"
tomli = ">=0.2.6,<2.0.0"
typing-extensions = [
{version = ">=3.10.0.0", markers = "python_version < \"3.10\""},
{version = "!=3.10.0.1", markers = "python_version >= \"3.10\""},
]
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
python2 = ["typed-ast (>=1.4.2)"]
uvloop = ["uvloop (>=0.15.2)"]
[[package]] [[package]]
name = "click" name = "click"
version = "8.0.1" version = "8.0.3"
description = "Composable command line interface toolkit" description = "Composable command line interface toolkit"
category = "main" category = "main"
optional = false optional = false
@ -99,6 +141,42 @@ category = "dev"
optional = false optional = false
python-versions = ">=3.5" python-versions = ">=3.5"
[[package]]
name = "flake8"
version = "4.0.1"
description = "the modular source code checker: pep8 pyflakes and co"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
mccabe = ">=0.6.0,<0.7.0"
pycodestyle = ">=2.8.0,<2.9.0"
pyflakes = ">=2.4.0,<2.5.0"
[[package]]
name = "gitdb"
version = "4.0.7"
description = "Git Object Database"
category = "dev"
optional = false
python-versions = ">=3.4"
[package.dependencies]
smmap = ">=3.0.1,<5"
[[package]]
name = "gitpython"
version = "3.1.24"
description = "GitPython is a python library used to interact with Git repositories"
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
gitdb = ">=4.0.1,<5"
typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""}
[[package]] [[package]]
name = "iniconfig" name = "iniconfig"
version = "1.1.1" version = "1.1.1"
@ -122,7 +200,7 @@ toml = {version = ">=0.10.2", markers = "python_version > \"3.6\""}
[[package]] [[package]]
name = "ipython" name = "ipython"
version = "7.27.0" version = "7.28.0"
description = "IPython: Productive Interactive Computing" description = "IPython: Productive Interactive Computing"
category = "dev" category = "dev"
optional = false optional = false
@ -156,7 +234,7 @@ test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipyk
name = "isort" name = "isort"
version = "5.9.3" version = "5.9.3"
description = "A Python utility / library to sort Python imports." description = "A Python utility / library to sort Python imports."
category = "main" category = "dev"
optional = false optional = false
python-versions = ">=3.6.1,<4.0" python-versions = ">=3.6.1,<4.0"
@ -185,7 +263,7 @@ testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"]
name = "lazy-object-proxy" name = "lazy-object-proxy"
version = "1.6.0" version = "1.6.0"
description = "A fast and thorough lazy object proxy." description = "A fast and thorough lazy object proxy."
category = "main" category = "dev"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
@ -204,7 +282,7 @@ traitlets = "*"
name = "mccabe" name = "mccabe"
version = "0.6.1" version = "0.6.1"
description = "McCabe checker, plugin for flake8" description = "McCabe checker, plugin for flake8"
category = "main" category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "*"
@ -212,7 +290,7 @@ python-versions = "*"
name = "mypy" name = "mypy"
version = "0.910" version = "0.910"
description = "Optional static typing for Python" description = "Optional static typing for Python"
category = "main" category = "dev"
optional = false optional = false
python-versions = ">=3.5" python-versions = ">=3.5"
@ -229,7 +307,7 @@ python2 = ["typed-ast (>=1.4.0,<1.5.0)"]
name = "mypy-extensions" name = "mypy-extensions"
version = "0.4.3" version = "0.4.3"
description = "Experimental type system extensions for programs checked with the mypy typechecker." description = "Experimental type system extensions for programs checked with the mypy typechecker."
category = "main" category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "*"
@ -256,6 +334,22 @@ python-versions = ">=3.6"
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
testing = ["docopt", "pytest (<6.0.0)"] testing = ["docopt", "pytest (<6.0.0)"]
[[package]]
name = "pathspec"
version = "0.9.0"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "pbr"
version = "5.6.0"
description = "Python Build Reasonableness"
category = "dev"
optional = false
python-versions = ">=2.6"
[[package]] [[package]]
name = "pexpect" name = "pexpect"
version = "4.8.0" version = "4.8.0"
@ -279,7 +373,7 @@ python-versions = "*"
name = "platformdirs" name = "platformdirs"
version = "2.4.0" version = "2.4.0"
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "main" category = "dev"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.6"
@ -334,6 +428,14 @@ category = "dev"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pycodestyle"
version = "2.8.0"
description = "Python style guide checker"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]] [[package]]
name = "pydantic" name = "pydantic"
version = "1.8.2" version = "1.8.2"
@ -349,6 +451,14 @@ typing-extensions = ">=3.7.4.3"
dotenv = ["python-dotenv (>=0.10.4)"] dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"] email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pyflakes"
version = "2.4.0"
description = "passive checker of Python programs"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]] [[package]]
name = "pygments" name = "pygments"
version = "2.10.0" version = "2.10.0"
@ -361,7 +471,7 @@ python-versions = ">=3.5"
name = "pylint" name = "pylint"
version = "2.11.1" version = "2.11.1"
description = "python code static checker" description = "python code static checker"
category = "main" category = "dev"
optional = false optional = false
python-versions = "~=3.6" python-versions = "~=3.6"
@ -411,6 +521,14 @@ category = "main"
optional = false optional = false
python-versions = "*" python-versions = "*"
[[package]]
name = "pyyaml"
version = "6.0"
description = "YAML parser and emitter for Python"
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]] [[package]]
name = "redis" name = "redis"
version = "3.5.3" version = "3.5.3"
@ -422,6 +540,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras] [package.extras]
hiredis = ["hiredis (>=0.1.3)"] hiredis = ["hiredis (>=0.1.3)"]
[[package]]
name = "regex"
version = "2021.10.8"
description = "Alternative regular expression module, to replace re."
category = "dev"
optional = false
python-versions = "*"
[[package]] [[package]]
name = "six" name = "six"
version = "1.16.0" version = "1.16.0"
@ -430,14 +556,41 @@ category = "main"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "smmap"
version = "4.0.0"
description = "A pure Python implementation of a sliding window memory map manager"
category = "dev"
optional = false
python-versions = ">=3.5"
[[package]]
name = "stevedore"
version = "3.5.0"
description = "Manage dynamic plugins for Python applications"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
pbr = ">=2.0.0,<2.1.0 || >2.1.0"
[[package]] [[package]]
name = "toml" name = "toml"
version = "0.10.2" version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language" description = "Python Library for Tom's Obvious, Minimal Language"
category = "main" category = "dev"
optional = false optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "tomli"
version = "1.2.1"
description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.6"
[[package]] [[package]]
name = "traitlets" name = "traitlets"
version = "5.1.0" version = "5.1.0"
@ -451,7 +604,7 @@ test = ["pytest"]
[[package]] [[package]]
name = "types-redis" name = "types-redis"
version = "3.5.9" version = "3.5.15"
description = "Typing stubs for redis" description = "Typing stubs for redis"
category = "main" category = "main"
optional = false optional = false
@ -459,7 +612,7 @@ python-versions = "*"
[[package]] [[package]]
name = "types-six" name = "types-six"
version = "1.16.1" version = "1.16.2"
description = "Typing stubs for six" description = "Typing stubs for six"
category = "main" category = "main"
optional = false optional = false
@ -483,16 +636,16 @@ python-versions = "*"
[[package]] [[package]]
name = "wrapt" name = "wrapt"
version = "1.12.1" version = "1.13.2"
description = "Module for decorators, wrappers and monkey patching." description = "Module for decorators, wrappers and monkey patching."
category = "main" category = "dev"
optional = false optional = false
python-versions = "*" python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = "^3.8" python-versions = "^3.8"
content-hash = "e643c8bcc3f54c414e388a8c62256c3c0fe9e2fb0374c3f3b4140e2b0684b654" content-hash = "f1ccd73314f307ce41497d093ddce99cfb96ebf1814e854a94e37d5156647967"
[metadata.files] [metadata.files]
aioredis = [ aioredis = [
@ -504,8 +657,8 @@ appnope = [
{file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"},
] ]
astroid = [ astroid = [
{file = "astroid-2.8.0-py3-none-any.whl", hash = "sha256:dcc06f6165f415220013801642bd6c9808a02967070919c4b746c6864c205471"}, {file = "astroid-2.8.3-py3-none-any.whl", hash = "sha256:f9d66e3a4a0e5b52819b2ff41ac2b179df9d180697db71c92beb33a60c661794"},
{file = "astroid-2.8.0.tar.gz", hash = "sha256:fe81f80c0b35264acb5653302ffbd935d394f1775c5e4487df745bf9c2442708"}, {file = "astroid-2.8.3.tar.gz", hash = "sha256:0e361da0744d5011d4f5d57e64473ba9b7ab4da1e2d45d6631ebd67dd28c3cce"},
] ]
async-timeout = [ async-timeout = [
{file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"},
@ -523,9 +676,17 @@ backcall = [
{file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"},
{file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
] ]
bandit = [
{file = "bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"},
{file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"},
]
black = [
{file = "black-21.9b0-py3-none-any.whl", hash = "sha256:380f1b5da05e5a1429225676655dddb96f5ae8c75bdf91e53d798871b902a115"},
{file = "black-21.9b0.tar.gz", hash = "sha256:7de4cfc7eb6b710de325712d40125689101d21d25283eed7e9998722cf10eb91"},
]
click = [ click = [
{file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"},
{file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"},
] ]
colorama = [ colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
@ -535,6 +696,18 @@ decorator = [
{file = "decorator-5.1.0-py3-none-any.whl", hash = "sha256:7b12e7c3c6ab203a29e157335e9122cb03de9ab7264b137594103fd4a683b374"}, {file = "decorator-5.1.0-py3-none-any.whl", hash = "sha256:7b12e7c3c6ab203a29e157335e9122cb03de9ab7264b137594103fd4a683b374"},
{file = "decorator-5.1.0.tar.gz", hash = "sha256:e59913af105b9860aa2c8d3272d9de5a56a4e608db9a2f167a8480b323d529a7"}, {file = "decorator-5.1.0.tar.gz", hash = "sha256:e59913af105b9860aa2c8d3272d9de5a56a4e608db9a2f167a8480b323d529a7"},
] ]
flake8 = [
{file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"},
{file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
]
gitdb = [
{file = "gitdb-4.0.7-py3-none-any.whl", hash = "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0"},
{file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"},
]
gitpython = [
{file = "GitPython-3.1.24-py3-none-any.whl", hash = "sha256:dc0a7f2f697657acc8d7f89033e8b1ea94dd90356b2983bca89dc8d2ab3cc647"},
{file = "GitPython-3.1.24.tar.gz", hash = "sha256:df83fdf5e684fef7c6ee2c02fc68a5ceb7e7e759d08b694088d0cacb4eba59e5"},
]
iniconfig = [ iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
@ -543,8 +716,8 @@ ipdb = [
{file = "ipdb-0.13.9.tar.gz", hash = "sha256:951bd9a64731c444fd907a5ce268543020086a697f6be08f7cc2c9a752a278c5"}, {file = "ipdb-0.13.9.tar.gz", hash = "sha256:951bd9a64731c444fd907a5ce268543020086a697f6be08f7cc2c9a752a278c5"},
] ]
ipython = [ ipython = [
{file = "ipython-7.27.0-py3-none-any.whl", hash = "sha256:75b5e060a3417cf64f138e0bb78e58512742c57dc29db5a5058a2b1f0c10df02"}, {file = "ipython-7.28.0-py3-none-any.whl", hash = "sha256:f16148f9163e1e526f1008d7c8d966d9c15600ca20d1a754287cf96d00ba6f1d"},
{file = "ipython-7.27.0.tar.gz", hash = "sha256:58b55ebfdfa260dad10d509702dc2857cb25ad82609506b070cf2d7b7df5af13"}, {file = "ipython-7.28.0.tar.gz", hash = "sha256:2097be5c814d1b974aea57673176a924c4c8c9583890e7a5f082f547b9975b11"},
] ]
isort = [ isort = [
{file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"}, {file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"},
@ -623,6 +796,14 @@ parso = [
{file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"},
{file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"},
] ]
pathspec = [
{file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
pbr = [
{file = "pbr-5.6.0-py2.py3-none-any.whl", hash = "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4"},
{file = "pbr-5.6.0.tar.gz", hash = "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd"},
]
pexpect = [ pexpect = [
{file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
{file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
@ -654,6 +835,10 @@ py = [
{file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
{file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"},
] ]
pycodestyle = [
{file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
{file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
]
pydantic = [ pydantic = [
{file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"}, {file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"},
{file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"}, {file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"},
@ -678,6 +863,10 @@ pydantic = [
{file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"}, {file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"},
{file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"}, {file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"},
] ]
pyflakes = [
{file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
{file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
]
pygments = [ pygments = [
{file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"},
{file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"},
@ -698,29 +887,125 @@ python-ulid = [
{file = "python-ulid-1.0.3.tar.gz", hash = "sha256:5dd8b969312a40e2212cec9c1ad63f25d4b6eafd92ee3195883e0287b6e9d19e"}, {file = "python-ulid-1.0.3.tar.gz", hash = "sha256:5dd8b969312a40e2212cec9c1ad63f25d4b6eafd92ee3195883e0287b6e9d19e"},
{file = "python_ulid-1.0.3-py3-none-any.whl", hash = "sha256:8704dc20f547f531fe3a41d4369842d737a0f275403b909d0872e7ea0fe8d6f2"}, {file = "python_ulid-1.0.3-py3-none-any.whl", hash = "sha256:8704dc20f547f531fe3a41d4369842d737a0f275403b909d0872e7ea0fe8d6f2"},
] ]
pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
{file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
{file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
{file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
{file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
{file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
{file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
{file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
{file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
{file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
{file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
{file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
redis = [ redis = [
{file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"},
{file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"}, {file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"},
] ]
regex = [
{file = "regex-2021.10.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:094a905e87a4171508c2a0e10217795f83c636ccc05ddf86e7272c26e14056ae"},
{file = "regex-2021.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:981c786293a3115bc14c103086ae54e5ee50ca57f4c02ce7cf1b60318d1e8072"},
{file = "regex-2021.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b0f2f874c6a157c91708ac352470cb3bef8e8814f5325e3c5c7a0533064c6a24"},
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51feefd58ac38eb91a21921b047da8644155e5678e9066af7bcb30ee0dca7361"},
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8de658d7db5987b11097445f2b1f134400e2232cb40e614e5f7b6f5428710e"},
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1ce02f420a7ec3b2480fe6746d756530f69769292eca363218c2291d0b116a01"},
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39079ebf54156be6e6902f5c70c078f453350616cfe7bfd2dd15bdb3eac20ccc"},
{file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ff24897f6b2001c38a805d53b6ae72267025878d35ea225aa24675fbff2dba7f"},
{file = "regex-2021.10.8-cp310-cp310-win32.whl", hash = "sha256:c6569ba7b948c3d61d27f04e2b08ebee24fec9ff8e9ea154d8d1e975b175bfa7"},
{file = "regex-2021.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:45cb0f7ff782ef51bc79e227a87e4e8f24bc68192f8de4f18aae60b1d60bc152"},
{file = "regex-2021.10.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fab3ab8aedfb443abb36729410403f0fe7f60ad860c19a979d47fb3eb98ef820"},
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e55f8d66f1b41d44bc44c891bcf2c7fad252f8f323ee86fba99d71fd1ad5e3"},
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d52c5e089edbdb6083391faffbe70329b804652a53c2fdca3533e99ab0580d9"},
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1abbd95cbe9e2467cac65c77b6abd9223df717c7ae91a628502de67c73bf6838"},
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9b5c215f3870aa9b011c00daeb7be7e1ae4ecd628e9beb6d7e6107e07d81287"},
{file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f540f153c4f5617bc4ba6433534f8916d96366a08797cbbe4132c37b70403e92"},
{file = "regex-2021.10.8-cp36-cp36m-win32.whl", hash = "sha256:1f51926db492440e66c89cd2be042f2396cf91e5b05383acd7372b8cb7da373f"},
{file = "regex-2021.10.8-cp36-cp36m-win_amd64.whl", hash = "sha256:5f55c4804797ef7381518e683249310f7f9646da271b71cb6b3552416c7894ee"},
{file = "regex-2021.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb2baff66b7d2267e07ef71e17d01283b55b3cc51a81b54cc385e721ae172ba4"},
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e527ab1c4c7cf2643d93406c04e1d289a9d12966529381ce8163c4d2abe4faf"},
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c98b013273e9da5790ff6002ab326e3f81072b4616fd95f06c8fa733d2745f"},
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:55ef044899706c10bc0aa052f2fc2e58551e2510694d6aae13f37c50f3f6ff61"},
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0ab3530a279a3b7f50f852f1bab41bc304f098350b03e30a3876b7dd89840e"},
{file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a37305eb3199d8f0d8125ec2fb143ba94ff6d6d92554c4b8d4a8435795a6eccd"},
{file = "regex-2021.10.8-cp37-cp37m-win32.whl", hash = "sha256:2efd47704bbb016136fe34dfb74c805b1ef5c7313aef3ce6dcb5ff844299f432"},
{file = "regex-2021.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:924079d5590979c0e961681507eb1773a142553564ccae18d36f1de7324e71ca"},
{file = "regex-2021.10.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19b8f6d23b2dc93e8e1e7e288d3010e58fafed323474cf7f27ab9451635136d9"},
{file = "regex-2021.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b09d3904bf312d11308d9a2867427479d277365b1617e48ad09696fa7dfcdf59"},
{file = "regex-2021.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:951be934dc25d8779d92b530e922de44dda3c82a509cdb5d619f3a0b1491fafa"},
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f125fce0a0ae4fd5c3388d369d7a7d78f185f904c90dd235f7ecf8fe13fa741"},
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f199419a81c1016e0560c39773c12f0bd924c37715bffc64b97140d2c314354"},
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:09e1031e2059abd91177c302da392a7b6859ceda038be9e015b522a182c89e4f"},
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c070d5895ac6aeb665bd3cd79f673775caf8d33a0b569e98ac434617ecea57d"},
{file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:176796cb7f82a7098b0c436d6daac82f57b9101bb17b8e8119c36eecf06a60a3"},
{file = "regex-2021.10.8-cp38-cp38-win32.whl", hash = "sha256:5e5796d2f36d3c48875514c5cd9e4325a1ca172fc6c78b469faa8ddd3d770593"},
{file = "regex-2021.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:e4204708fa116dd03436a337e8e84261bc8051d058221ec63535c9403a1582a1"},
{file = "regex-2021.10.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6dcf53d35850ce938b4f044a43b33015ebde292840cef3af2c8eb4c860730fff"},
{file = "regex-2021.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8b6ee6555b6fbae578f1468b3f685cdfe7940a65675611365a7ea1f8d724991"},
{file = "regex-2021.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2ec1c106d3f754444abf63b31e5c4f9b5d272272a491fa4320475aba9e8157c"},
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973499dac63625a5ef9dfa4c791aa33a502ddb7615d992bdc89cf2cc2285daa3"},
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88dc3c1acd3f0ecfde5f95c32fcb9beda709dbdf5012acdcf66acbc4794468eb"},
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4786dae85c1f0624ac77cb3813ed99267c9adb72e59fdc7297e1cf4d6036d493"},
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe6ce4f3d3c48f9f402da1ceb571548133d3322003ce01b20d960a82251695d2"},
{file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e3e2cea8f1993f476a6833ef157f5d9e8c75a59a8d8b0395a9a6887a097243b"},
{file = "regex-2021.10.8-cp39-cp39-win32.whl", hash = "sha256:82cfb97a36b1a53de32b642482c6c46b6ce80803854445e19bc49993655ebf3b"},
{file = "regex-2021.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:b04e512eb628ea82ed86eb31c0f7fc6842b46bf2601b66b1356a7008327f7700"},
{file = "regex-2021.10.8.tar.gz", hash = "sha256:26895d7c9bbda5c52b3635ce5991caa90fbb1ddfac9c9ff1c7ce505e2282fb2a"},
]
six = [ six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
] ]
smmap = [
{file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"},
{file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"},
]
stevedore = [
{file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"},
{file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"},
]
toml = [ toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
] ]
tomli = [
{file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"},
{file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"},
]
traitlets = [ traitlets = [
{file = "traitlets-5.1.0-py3-none-any.whl", hash = "sha256:03f172516916220b58c9f19d7f854734136dd9528103d04e9bf139a92c9f54c4"}, {file = "traitlets-5.1.0-py3-none-any.whl", hash = "sha256:03f172516916220b58c9f19d7f854734136dd9528103d04e9bf139a92c9f54c4"},
{file = "traitlets-5.1.0.tar.gz", hash = "sha256:bd382d7ea181fbbcce157c133db9a829ce06edffe097bcf3ab945b435452b46d"}, {file = "traitlets-5.1.0.tar.gz", hash = "sha256:bd382d7ea181fbbcce157c133db9a829ce06edffe097bcf3ab945b435452b46d"},
] ]
types-redis = [ types-redis = [
{file = "types-redis-3.5.9.tar.gz", hash = "sha256:f142c48f4080757ca2a9441ec40213bda3b1535eebebfc4f3519e5aa46498076"}, {file = "types-redis-3.5.15.tar.gz", hash = "sha256:e52be0077ca1189d8cce813a20c2a70e9e577f34ab898371c6cbed696a88bdee"},
{file = "types_redis-3.5.9-py3-none-any.whl", hash = "sha256:5f5648ffc025708858097173cf695164c20f2b5e3f57177de14e352cae8cc335"}, {file = "types_redis-3.5.15-py3-none-any.whl", hash = "sha256:e617c08bff88449b52f6dbdaa9bb81a806f27c89fd30bbf98fe9683ed5d1046a"},
] ]
types-six = [ types-six = [
{file = "types-six-1.16.1.tar.gz", hash = "sha256:a9e6769cb0808f920958ac95f75c5191f49e21e041eac127fa62e286e1005616"}, {file = "types-six-1.16.2.tar.gz", hash = "sha256:b96bd911f87d15258c38e10ee3f0921c32887a5d22e41c39d15707b4d0e4d0f1"},
{file = "types_six-1.16.1-py2.py3-none-any.whl", hash = "sha256:b14f5abe26c0997bd41a1a32d6816af25932f7bfbc54246dfdc8f6f6404fd1d4"}, {file = "types_six-1.16.2-py2.py3-none-any.whl", hash = "sha256:606dd8c7edff3100fae8277c270e65285e5cdb6a7819c0b1ea6a8973690e68da"},
] ]
typing-extensions = [ typing-extensions = [
{file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"},
@ -732,5 +1017,48 @@ wcwidth = [
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
] ]
wrapt = [ wrapt = [
{file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, {file = "wrapt-1.13.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3de7b4d3066cc610054e7aa2c005645e308df2f92be730aae3a47d42e910566a"},
{file = "wrapt-1.13.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:8164069f775c698d15582bf6320a4f308c50d048c1c10cf7d7a341feaccf5df7"},
{file = "wrapt-1.13.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9adee1891253670575028279de8365c3a02d3489a74a66d774c321472939a0b1"},
{file = "wrapt-1.13.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a70d876c9aba12d3bd7f8f1b05b419322c6789beb717044eea2c8690d35cb91b"},
{file = "wrapt-1.13.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3f87042623530bcffea038f824b63084180513c21e2e977291a9a7e65a66f13b"},
{file = "wrapt-1.13.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:e634136f700a21e1fcead0c137f433dde928979538c14907640607d43537d468"},
{file = "wrapt-1.13.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3e33c138d1e3620b1e0cc6fd21e46c266393ed5dae0d595b7ed5a6b73ed57aa0"},
{file = "wrapt-1.13.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:283e402e5357e104ac1e3fba5791220648e9af6fb14ad7d9cc059091af2b31d2"},
{file = "wrapt-1.13.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ccb34ce599cab7f36a4c90318697ead18312c67a9a76327b3f4f902af8f68ea1"},
{file = "wrapt-1.13.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:fbad5ba74c46517e6488149514b2e2348d40df88cd6b52a83855b7a8bf04723f"},
{file = "wrapt-1.13.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:724ed2bc9c91a2b9026e5adce310fa60c6e7c8760b03391445730b9789b9d108"},
{file = "wrapt-1.13.2-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:83f2793ec6f3ef513ad8d5b9586f5ee6081cad132e6eae2ecb7eac1cc3decae0"},
{file = "wrapt-1.13.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:0473d1558b93e314e84313cc611f6c86be779369f9d3734302bf185a4d2625b1"},
{file = "wrapt-1.13.2-cp35-cp35m-win32.whl", hash = "sha256:15eee0e6fd07f48af2f66d0e6f2ff1916ffe9732d464d5e2390695296872cad9"},
{file = "wrapt-1.13.2-cp35-cp35m-win_amd64.whl", hash = "sha256:bc85d17d90201afd88e3d25421da805e4e135012b5d1f149e4de2981394b2a52"},
{file = "wrapt-1.13.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c6ee5f8734820c21b9b8bf705e99faba87f21566d20626568eeb0d62cbeaf23c"},
{file = "wrapt-1.13.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:53c6706a1bcfb6436f1625511b95b812798a6d2ccc51359cd791e33722b5ea32"},
{file = "wrapt-1.13.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fbe6aebc9559fed7ea27de51c2bf5c25ba2a4156cf0017556f72883f2496ee9a"},
{file = "wrapt-1.13.2-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:0582180566e7a13030f896c2f1ac6a56134ab5f3c3f4c5538086f758b1caf3f2"},
{file = "wrapt-1.13.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:bff0a59387a0a2951cb869251257b6553663329a1b5525b5226cab8c88dcbe7e"},
{file = "wrapt-1.13.2-cp36-cp36m-win32.whl", hash = "sha256:df3eae297a5f1594d1feb790338120f717dac1fa7d6feed7b411f87e0f2401c7"},
{file = "wrapt-1.13.2-cp36-cp36m-win_amd64.whl", hash = "sha256:1eb657ed84f4d3e6ad648483c8a80a0cf0a78922ef94caa87d327e2e1ad49b48"},
{file = "wrapt-1.13.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0cdedf681db878416c05e1831ec69691b0e6577ac7dca9d4f815632e3549580"},
{file = "wrapt-1.13.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:87ee3c73bdfb4367b26c57259995935501829f00c7b3eed373e2ad19ec21e4e4"},
{file = "wrapt-1.13.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3e0d16eedc242d01a6f8cf0623e9cdc3b869329da3f97a15961d8864111d8cf0"},
{file = "wrapt-1.13.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:8318088860968c07e741537030b1abdd8908ee2c71fbe4facdaade624a09e006"},
{file = "wrapt-1.13.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d90520616fce71c05dedeac3a0fe9991605f0acacd276e5f821842e454485a70"},
{file = "wrapt-1.13.2-cp37-cp37m-win32.whl", hash = "sha256:22142afab65daffc95863d78effcbd31c19a8003eca73de59f321ee77f73cadb"},
{file = "wrapt-1.13.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d0d717e10f952df7ea41200c507cc7e24458f4c45b56c36ad418d2e79dacd1d4"},
{file = "wrapt-1.13.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:593cb049ce1c391e0288523b30426c4430b26e74c7e6f6e2844bd99ac7ecc831"},
{file = "wrapt-1.13.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8860c8011a6961a651b1b9f46fdbc589ab63b0a50d645f7d92659618a3655867"},
{file = "wrapt-1.13.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ada5e29e59e2feb710589ca1c79fd989b1dd94d27079dc1d199ec954a6ecc724"},
{file = "wrapt-1.13.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:fdede980273aeca591ad354608778365a3a310e0ecdd7a3587b38bc5be9b1808"},
{file = "wrapt-1.13.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:af9480de8e63c5f959a092047aaf3d7077422ded84695b3398f5d49254af3e90"},
{file = "wrapt-1.13.2-cp38-cp38-win32.whl", hash = "sha256:c65e623ea7556e39c4f0818200a046cbba7575a6b570ff36122c276fdd30ab0a"},
{file = "wrapt-1.13.2-cp38-cp38-win_amd64.whl", hash = "sha256:b20703356cae1799080d0ad15085dc3213c1ac3f45e95afb9f12769b98231528"},
{file = "wrapt-1.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c5c4cf188b5643a97e87e2110bbd4f5bc491d54a5b90633837b34d5df6a03fe"},
{file = "wrapt-1.13.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:82223f72eba6f63eafca87a0f614495ae5aa0126fe54947e2b8c023969e9f2d7"},
{file = "wrapt-1.13.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:81a4cf257263b299263472d669692785f9c647e7dca01c18286b8f116dbf6b38"},
{file = "wrapt-1.13.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:728e2d9b7a99dd955d3426f237b940fc74017c4a39b125fec913f575619ddfe9"},
{file = "wrapt-1.13.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7574de567dcd4858a2ffdf403088d6df8738b0e1eabea220553abf7c9048f59e"},
{file = "wrapt-1.13.2-cp39-cp39-win32.whl", hash = "sha256:c7ac2c7a8e34bd06710605b21dd1f3576764443d68e069d2afba9b116014d072"},
{file = "wrapt-1.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e6d1a8eeef415d7fb29fe017de0e48f45e45efd2d1bfda28fc50b7b330859ef"},
{file = "wrapt-1.13.2.tar.gz", hash = "sha256:dca56cc5963a5fd7c2aa8607017753f534ee514e09103a6c55d2db70b50e7447"},
] ]

View file

@ -13,15 +13,20 @@ pydantic = "^1.8.2"
click = "^8.0.1" click = "^8.0.1"
six = "^1.16.0" six = "^1.16.0"
pptree = "^3.1" pptree = "^3.1"
mypy = "^0.910"
types-redis = "^3.5.9" types-redis = "^3.5.9"
types-six = "^1.16.1" types-six = "^1.16.1"
python-ulid = "^1.0.3" python-ulid = "^1.0.3"
pylint = "^2.11.1"
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
mypy = "^0.910"
pytest = "^6.2.4" pytest = "^6.2.4"
ipdb = "^0.13.9" ipdb = "^0.13.9"
pylint = "^2.11.1"
black = "^21.9b0"
isort = "^5.9.3"
flake8 = "^4.0.1"
bandit = "^1.7.0"
[tool.poetry.scripts] [tool.poetry.scripts]
migrate = "redis_developer.orm.cli.migrate:migrate" migrate = "redis_developer.orm.cli.migrate:migrate"

View file

@ -1,7 +1 @@
from .model import ( from .model import EmbeddedJsonModel, Field, HashModel, JsonModel, RedisModel
RedisModel,
HashModel,
JsonModel,
EmbeddedJsonModel,
Field
)

View file

@ -1,16 +1,17 @@
import click import click
from redis_developer.model.migrations.migrator import Migrator from redis_developer.model.migrations.migrator import Migrator
@click.command() @click.command()
@click.option("--module", default="redis_developer") @click.option("--module", default="redis_developer")
def migrate(module): def migrate(module):
migrator = Migrator(module) migrator = Migrator(module)
if migrator.migrations: if migrator.migrations:
print("Pending migrations:") print("Pending migrations:")
for migration in migrator.migrations: for migration in migrator.migrations:
print(migration) print(migration)
if input(f"Run migrations? (y/n) ") == "y": if input("Run migrations? (y/n) ") == "y":
migrator.run() migrator.run()

View file

@ -34,6 +34,7 @@ from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
from pydantic import BaseModel from pydantic import BaseModel
from pydantic.json import ENCODERS_BY_TYPE from pydantic.json import ENCODERS_BY_TYPE
SetIntStr = Set[Union[int, str]] SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any] DictIntStrAny = Dict[Union[int, str], Any]

View file

@ -9,20 +9,33 @@ from redis import ResponseError
from redis_developer.connections import get_redis_connection from redis_developer.connections import get_redis_connection
from redis_developer.model.model import model_registry from redis_developer.model.model import model_registry
redis = get_redis_connection() redis = get_redis_connection()
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
import importlib import importlib # noqa: E402
import pkgutil import pkgutil # noqa: E402
class MigrationError(Exception):
pass
def import_submodules(root_module_name: str): def import_submodules(root_module_name: str):
"""Import all submodules of a module, recursively.""" """Import all submodules of a module, recursively."""
# TODO: Call this without specifying a module name, to import everything? # TODO: Call this without specifying a module name, to import everything?
root_module = importlib.import_module(root_module_name) root_module = importlib.import_module(root_module_name)
if not hasattr(root_module, "__path__"):
raise MigrationError(
"The root module must be a Python package. "
f"You specified: {root_module_name}"
)
for loader, module_name, is_pkg in pkgutil.walk_packages( for loader, module_name, is_pkg in pkgutil.walk_packages(
root_module.__path__, root_module.__name__ + '.'): root_module.__path__, root_module.__name__ + "." # type: ignore
):
importlib.import_module(module_name) importlib.import_module(module_name)
@ -77,14 +90,20 @@ class Migrator:
except NotImplementedError: except NotImplementedError:
log.info("Skipping migrations for %s", name) log.info("Skipping migrations for %s", name)
continue continue
current_hash = hashlib.sha1(schema.encode("utf-8")).hexdigest() current_hash = hashlib.sha1(schema.encode("utf-8")).hexdigest() # nosec
try: try:
redis.execute_command("ft.info", cls.Meta.index_name) redis.execute_command("ft.info", cls.Meta.index_name)
except ResponseError: except ResponseError:
self.migrations.append( self.migrations.append(
IndexMigration(name, cls.Meta.index_name, schema, current_hash, IndexMigration(
MigrationAction.CREATE)) name,
cls.Meta.index_name,
schema,
current_hash,
MigrationAction.CREATE,
)
)
continue continue
stored_hash = redis.get(hash_key) stored_hash = redis.get(hash_key)
@ -93,11 +112,25 @@ class Migrator:
if schema_out_of_date: if schema_out_of_date:
# TODO: Switch out schema with an alias to avoid downtime -- separate migration? # TODO: Switch out schema with an alias to avoid downtime -- separate migration?
self.migrations.append( self.migrations.append(
IndexMigration(name, cls.Meta.index_name, schema, current_hash, IndexMigration(
MigrationAction.DROP, stored_hash)) name,
cls.Meta.index_name,
schema,
current_hash,
MigrationAction.DROP,
stored_hash,
)
)
self.migrations.append( self.migrations.append(
IndexMigration(name, cls.Meta.index_name, schema, current_hash, IndexMigration(
MigrationAction.CREATE, stored_hash)) name,
cls.Meta.index_name,
schema,
current_hash,
MigrationAction.CREATE,
stored_hash,
)
)
def run(self): def run(self):
# TODO: Migration history # TODO: Migration history

View file

@ -4,7 +4,7 @@ import decimal
import json import json
import logging import logging
import operator import operator
from copy import deepcopy, copy from copy import copy, deepcopy
from enum import Enum from enum import Enum
from functools import reduce from functools import reduce
from typing import ( from typing import (
@ -12,18 +12,19 @@ from typing import (
Any, Any,
Callable, Callable,
Dict, Dict,
List,
Mapping, Mapping,
Optional, Optional,
Protocol,
Sequence,
Set, Set,
Tuple, Tuple,
Type,
TypeVar, TypeVar,
Union, Union,
Sequence, get_args,
no_type_check,
Protocol,
List,
get_origin, get_origin,
get_args, Type no_type_check,
) )
import redis import redis
@ -40,6 +41,7 @@ from .encoders import jsonable_encoder
from .render_tree import render_tree from .render_tree import render_tree
from .token_escaper import TokenEscaper from .token_escaper import TokenEscaper
model_registry = {} model_registry = {}
_T = TypeVar("_T") _T = TypeVar("_T")
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -92,7 +94,7 @@ class Operators(Enum):
return str(self.name) return str(self.name)
ExpressionOrModelField = Union['Expression', 'NegatedExpression', ModelField] ExpressionOrModelField = Union["Expression", "NegatedExpression", ModelField]
def embedded(cls): def embedded(cls):
@ -100,20 +102,22 @@ def embedded(cls):
Mark a model as embedded to avoid creating multiple indexes if the model is Mark a model as embedded to avoid creating multiple indexes if the model is
only ever used embedded within other models. only ever used embedded within other models.
""" """
setattr(cls.Meta, 'embedded', True) setattr(cls.Meta, "embedded", True)
def is_supported_container_type(typ: type) -> bool: def is_supported_container_type(typ: Optional[type]) -> bool:
if typ == list or typ == tuple: if typ == list or typ == tuple:
return True return True
unwrapped = get_origin(typ) unwrapped = get_origin(typ)
return unwrapped == list or unwrapped == tuple return unwrapped == list or unwrapped == tuple
def validate_model_fields(model: Type['RedisModel'], field_values: Dict[str, Any]): def validate_model_fields(model: Type["RedisModel"], field_values: Dict[str, Any]):
for field_name in field_values.keys(): for field_name in field_values.keys():
if field_name not in model.__fields__: if field_name not in model.__fields__:
raise QuerySyntaxError(f"The field {field_name} does not exist on the model {self.model}") raise QuerySyntaxError(
f"The field {field_name} does not exist on the model {model.__name__}"
)
class ExpressionProtocol(Protocol): class ExpressionProtocol(Protocol):
@ -121,7 +125,7 @@ class ExpressionProtocol(Protocol):
left: ExpressionOrModelField left: ExpressionOrModelField
right: ExpressionOrModelField right: ExpressionOrModelField
def __invert__(self) -> 'Expression': def __invert__(self) -> "Expression":
pass pass
def __and__(self, other: ExpressionOrModelField): def __and__(self, other: ExpressionOrModelField):
@ -148,16 +152,21 @@ class NegatedExpression:
responsible for querying) to negate the logic in the wrapped Expression. A responsible for querying) to negate the logic in the wrapped Expression. A
better design is probably possible, maybe at least an ExpressionProtocol? better design is probably possible, maybe at least an ExpressionProtocol?
""" """
expression: 'Expression'
expression: "Expression"
def __invert__(self): def __invert__(self):
return self.expression return self.expression
def __and__(self, other): def __and__(self, other):
return Expression(left=self, op=Operators.AND, right=other, parents=self.expression.parents) return Expression(
left=self, op=Operators.AND, right=other, parents=self.expression.parents
)
def __or__(self, other): def __or__(self, other):
return Expression(left=self, op=Operators.OR, right=other, parents=self.expression.parents) return Expression(
left=self, op=Operators.OR, right=other, parents=self.expression.parents
)
@property @property
def left(self): def left(self):
@ -188,13 +197,15 @@ class Expression:
op: Operators op: Operators
left: Optional[ExpressionOrModelField] left: Optional[ExpressionOrModelField]
right: Optional[ExpressionOrModelField] right: Optional[ExpressionOrModelField]
parents: List[Tuple[str, 'RedisModel']] parents: List[Tuple[str, "RedisModel"]]
def __invert__(self): def __invert__(self):
return NegatedExpression(self) return NegatedExpression(self)
def __and__(self, other: ExpressionOrModelField): def __and__(self, other: ExpressionOrModelField):
return Expression(left=self, op=Operators.AND, right=other, parents=self.parents) return Expression(
left=self, op=Operators.AND, right=other, parents=self.parents
)
def __or__(self, other: ExpressionOrModelField): def __or__(self, other: ExpressionOrModelField):
return Expression(left=self, op=Operators.OR, right=other, parents=self.parents) return Expression(left=self, op=Operators.OR, right=other, parents=self.parents)
@ -212,41 +223,59 @@ ExpressionOrNegated = Union[Expression, NegatedExpression]
class ExpressionProxy: class ExpressionProxy:
def __init__(self, field: ModelField, parents: List[Tuple[str, 'RedisModel']]): def __init__(self, field: ModelField, parents: List[Tuple[str, "RedisModel"]]):
self.field = field self.field = field
self.parents = parents self.parents = parents
def __eq__(self, other: Any) -> Expression: # type: ignore[override] def __eq__(self, other: Any) -> Expression: # type: ignore[override]
return Expression(left=self.field, op=Operators.EQ, right=other, parents=self.parents) return Expression(
left=self.field, op=Operators.EQ, right=other, parents=self.parents
)
def __ne__(self, other: Any) -> Expression: # type: ignore[override] def __ne__(self, other: Any) -> Expression: # type: ignore[override]
return Expression(left=self.field, op=Operators.NE, right=other, parents=self.parents) return Expression(
left=self.field, op=Operators.NE, right=other, parents=self.parents
)
def __lt__(self, other: Any) -> Expression: def __lt__(self, other: Any) -> Expression:
return Expression(left=self.field, op=Operators.LT, right=other, parents=self.parents) return Expression(
left=self.field, op=Operators.LT, right=other, parents=self.parents
)
def __le__(self, other: Any) -> Expression: def __le__(self, other: Any) -> Expression:
return Expression(left=self.field, op=Operators.LE, right=other, parents=self.parents) return Expression(
left=self.field, op=Operators.LE, right=other, parents=self.parents
)
def __gt__(self, other: Any) -> Expression: def __gt__(self, other: Any) -> Expression:
return Expression(left=self.field, op=Operators.GT, right=other, parents=self.parents) return Expression(
left=self.field, op=Operators.GT, right=other, parents=self.parents
)
def __ge__(self, other: Any) -> Expression: def __ge__(self, other: Any) -> Expression:
return Expression(left=self.field, op=Operators.GE, right=other, parents=self.parents) return Expression(
left=self.field, op=Operators.GE, right=other, parents=self.parents
)
def __mod__(self, other: Any) -> Expression: def __mod__(self, other: Any) -> Expression:
return Expression(left=self.field, op=Operators.LIKE, right=other, parents=self.parents) return Expression(
left=self.field, op=Operators.LIKE, right=other, parents=self.parents
)
def __lshift__(self, other: Any) -> Expression: def __lshift__(self, other: Any) -> Expression:
return Expression(left=self.field, op=Operators.IN, right=other, parents=self.parents) return Expression(
left=self.field, op=Operators.IN, right=other, parents=self.parents
)
def __getattr__(self, item): def __getattr__(self, item):
if is_supported_container_type(self.field.outer_type_): if is_supported_container_type(self.field.outer_type_):
embedded_cls = get_args(self.field.outer_type_) embedded_cls = get_args(self.field.outer_type_)
if not embedded_cls: if not embedded_cls:
raise QuerySyntaxError("In order to query on a list field, you must define " raise QuerySyntaxError(
"the contents of the list with a type annotation, like: " "In order to query on a list field, you must define "
"orders: List[Order]. Docs: TODO") "the contents of the list with a type annotation, like: "
"orders: List[Order]. Docs: TODO"
)
embedded_cls = embedded_cls[0] embedded_cls = embedded_cls[0]
attr = getattr(embedded_cls, item) attr = getattr(embedded_cls, item)
else: else:
@ -266,10 +295,10 @@ class QueryNotSupportedError(Exception):
class RediSearchFieldTypes(Enum): class RediSearchFieldTypes(Enum):
TEXT = 'TEXT' TEXT = "TEXT"
TAG = 'TAG' TAG = "TAG"
NUMERIC = 'NUMERIC' NUMERIC = "NUMERIC"
GEO = 'GEO' GEO = "GEO"
# TODO: How to handle Geo fields? # TODO: How to handle Geo fields?
@ -278,13 +307,15 @@ DEFAULT_PAGE_SIZE = 10
class FindQuery: class FindQuery:
def __init__(self, def __init__(
expressions: Sequence[ExpressionOrNegated], self,
model: Type['RedisModel'], expressions: Sequence[ExpressionOrNegated],
offset: int = 0, model: Type["RedisModel"],
limit: int = DEFAULT_PAGE_SIZE, offset: int = 0,
page_size: int = DEFAULT_PAGE_SIZE, limit: int = DEFAULT_PAGE_SIZE,
sort_fields: Optional[List[str]] = None): page_size: int = DEFAULT_PAGE_SIZE,
sort_fields: Optional[List[str]] = None,
):
self.expressions = expressions self.expressions = expressions
self.model = model self.model = model
self.offset = offset self.offset = offset
@ -308,7 +339,7 @@ class FindQuery:
page_size=self.page_size, page_size=self.page_size,
limit=self.limit, limit=self.limit,
expressions=copy(self.expressions), expressions=copy(self.expressions),
sort_fields=copy(self.sort_fields) sort_fields=copy(self.sort_fields),
) )
def copy(self, **kwargs): def copy(self, **kwargs):
@ -330,7 +361,9 @@ class FindQuery:
if self.expressions: if self.expressions:
self._expression = reduce(operator.and_, self.expressions) self._expression = reduce(operator.and_, self.expressions)
else: else:
self._expression = Expression(left=None, right=None, op=Operators.ALL, parents=[]) self._expression = Expression(
left=None, right=None, op=Operators.ALL, parents=[]
)
return self._expression return self._expression
@property @property
@ -350,24 +383,30 @@ class FindQuery:
for sort_field in sort_fields: for sort_field in sort_fields:
field_name = sort_field.lstrip("-") field_name = sort_field.lstrip("-")
if field_name not in self.model.__fields__: if field_name not in self.model.__fields__:
raise QueryNotSupportedError(f"You tried sort by {field_name}, but that field " raise QueryNotSupportedError(
f"does not exist on the model {self.model}") f"You tried sort by {field_name}, but that field "
f"does not exist on the model {self.model}"
)
field_proxy = getattr(self.model, field_name) field_proxy = getattr(self.model, field_name)
if not getattr(field_proxy.field.field_info, 'sortable', False): if not getattr(field_proxy.field.field_info, "sortable", False):
raise QueryNotSupportedError(f"You tried sort by {field_name}, but {self.model} does " raise QueryNotSupportedError(
"not define that field as sortable. See docs: XXX") f"You tried sort by {field_name}, but {self.model} does "
"not define that field as sortable. See docs: XXX"
)
return sort_fields return sort_fields
@staticmethod @staticmethod
def resolve_field_type(field: ModelField, op: Operators) -> RediSearchFieldTypes: def resolve_field_type(field: ModelField, op: Operators) -> RediSearchFieldTypes:
if getattr(field.field_info, 'primary_key', None) is True: if getattr(field.field_info, "primary_key", None) is True:
return RediSearchFieldTypes.TAG return RediSearchFieldTypes.TAG
elif op is Operators.LIKE: elif op is Operators.LIKE:
fts = getattr(field.field_info, 'full_text_search', None) fts = getattr(field.field_info, "full_text_search", None)
if fts is not True: # Could be PydanticUndefined if fts is not True: # Could be PydanticUndefined
raise QuerySyntaxError(f"You tried to do a full-text search on the field '{field.name}', " raise QuerySyntaxError(
f"but the field is not indexed for full-text search. Use the " f"You tried to do a full-text search on the field '{field.name}', "
f"full_text_search=True option. Docs: TODO") f"but the field is not indexed for full-text search. Use the "
f"full_text_search=True option. Docs: TODO"
)
return RediSearchFieldTypes.TEXT return RediSearchFieldTypes.TEXT
field_type = field.outer_type_ field_type = field.outer_type_
@ -391,8 +430,10 @@ class FindQuery:
# within the model inside the list marked as `index=True`. # within the model inside the list marked as `index=True`.
return RediSearchFieldTypes.TAG return RediSearchFieldTypes.TAG
elif container_type is not None: elif container_type is not None:
raise QuerySyntaxError("Only lists and tuples are supported for multi-value fields. " raise QuerySyntaxError(
"See docs: TODO") "Only lists and tuples are supported for multi-value fields. "
"See docs: TODO"
)
elif any(issubclass(field_type, t) for t in NUMERIC_TYPES): elif any(issubclass(field_type, t) for t in NUMERIC_TYPES):
# Index numeric Python types as NUMERIC fields, so we can support # Index numeric Python types as NUMERIC fields, so we can support
# range queries. # range queries.
@ -419,14 +460,23 @@ class FindQuery:
try: try:
return "|".join([escaper.escape(str(v)) for v in value]) return "|".join([escaper.escape(str(v)) for v in value])
except TypeError: except TypeError:
log.debug("Escaping single non-iterable value used for an IN or " log.debug(
"NOT_IN query: %s", value) "Escaping single non-iterable value used for an IN or "
"NOT_IN query: %s",
value,
)
return escaper.escape(str(value)) return escaper.escape(str(value))
@classmethod @classmethod
def resolve_value(cls, field_name: str, field_type: RediSearchFieldTypes, def resolve_value(
field_info: PydanticFieldInfo, op: Operators, value: Any, cls,
parents: List[Tuple[str, 'RedisModel']]) -> str: field_name: str,
field_type: RediSearchFieldTypes,
field_info: PydanticFieldInfo,
op: Operators,
value: Any,
parents: List[Tuple[str, "RedisModel"]],
) -> str:
if parents: if parents:
prefix = "_".join([p[0] for p in parents]) prefix = "_".join([p[0] for p in parents])
field_name = f"{prefix}_{field_name}" field_name = f"{prefix}_{field_name}"
@ -440,9 +490,11 @@ class FindQuery:
elif op is Operators.LIKE: elif op is Operators.LIKE:
result += value result += value
else: else:
raise QueryNotSupportedError("Only equals (=), not-equals (!=), and like() " raise QueryNotSupportedError(
"comparisons are supported for TEXT fields. See " "Only equals (=), not-equals (!=), and like() "
"docs: TODO.") "comparisons are supported for TEXT fields. See "
"docs: TODO."
)
elif field_type is RediSearchFieldTypes.NUMERIC: elif field_type is RediSearchFieldTypes.NUMERIC:
if op is Operators.EQ: if op is Operators.EQ:
result += f"@{field_name}:[{value} {value}]" result += f"@{field_name}:[{value} {value}]"
@ -460,16 +512,22 @@ class FindQuery:
# field and our hidden use of TAG for exact-match queries? # field and our hidden use of TAG for exact-match queries?
elif field_type is RediSearchFieldTypes.TAG: elif field_type is RediSearchFieldTypes.TAG:
if op is Operators.EQ: if op is Operators.EQ:
separator_char = getattr(field_info, 'separator', separator_char = getattr(
SINGLE_VALUE_TAG_FIELD_SEPARATOR) field_info, "separator", SINGLE_VALUE_TAG_FIELD_SEPARATOR
)
if value == separator_char: if value == separator_char:
# The value is ONLY the TAG field separator character -- # The value is ONLY the TAG field separator character --
# this is not going to work. # this is not going to work.
log.warning("Your query against the field %s is for a single character, %s, " log.warning(
"that is used internally by redis-developer-python. We must ignore " "Your query against the field %s is for a single character, %s, "
"this portion of the query. Please review your query to find " "that is used internally by redis-developer-python. We must ignore "
"an alternative query that uses a string containing more than " "this portion of the query. Please review your query to find "
"just the character %s.", field_name, separator_char, separator_char) "an alternative query that uses a string containing more than "
"just the character %s.",
field_name,
separator_char,
separator_char,
)
return "" return ""
if separator_char in value: if separator_char in value:
# The value contains the TAG field separator. We can work # The value contains the TAG field separator. We can work
@ -506,8 +564,8 @@ class FindQuery:
return return
fields = [] fields = []
for f in self.sort_fields: for f in self.sort_fields:
direction = "desc" if f.startswith('-') else 'asc' direction = "desc" if f.startswith("-") else "asc"
fields.extend([f.lstrip('-'), direction]) fields.extend([f.lstrip("-"), direction])
if self.sort_fields: if self.sort_fields:
return ["SORTBY", *fields] return ["SORTBY", *fields]
@ -550,23 +608,30 @@ class FindQuery:
if encompassing_expression_is_negated: if encompassing_expression_is_negated:
# TODO: Is there a use case for this, perhaps for dynamic # TODO: Is there a use case for this, perhaps for dynamic
# scoring purposes with full-text search? # scoring purposes with full-text search?
raise QueryNotSupportedError("You cannot negate a query for all results.") raise QueryNotSupportedError(
"You cannot negate a query for all results."
)
return "*" return "*"
if isinstance(expression.left, Expression) or \ if isinstance(expression.left, Expression) or isinstance(
isinstance(expression.left, NegatedExpression): expression.left, NegatedExpression
):
result += f"({cls.resolve_redisearch_query(expression.left)})" result += f"({cls.resolve_redisearch_query(expression.left)})"
elif isinstance(expression.left, ModelField): elif isinstance(expression.left, ModelField):
field_type = cls.resolve_field_type(expression.left, expression.op) field_type = cls.resolve_field_type(expression.left, expression.op)
field_name = expression.left.name field_name = expression.left.name
field_info = expression.left.field_info field_info = expression.left.field_info
if not field_info or not getattr(field_info, "index", None): if not field_info or not getattr(field_info, "index", None):
raise QueryNotSupportedError(f"You tried to query by a field ({field_name}) " raise QueryNotSupportedError(
f"that isn't indexed. See docs: TODO") f"You tried to query by a field ({field_name}) "
f"that isn't indexed. See docs: TODO"
)
else: else:
raise QueryNotSupportedError(f"A query expression should start with either a field " raise QueryNotSupportedError(
f"or an expression enclosed in parenthesis. See docs: " "A query expression should start with either a field "
f"TODO") "or an expression enclosed in parenthesis. See docs: "
"TODO"
)
right = expression.right right = expression.right
@ -576,8 +641,10 @@ class FindQuery:
elif expression.op == Operators.OR: elif expression.op == Operators.OR:
result += "| " result += "| "
else: else:
raise QueryNotSupportedError("You can only combine two query expressions with" raise QueryNotSupportedError(
"AND (&) or OR (|). See docs: TODO") "You can only combine two query expressions with"
"AND (&) or OR (|). See docs: TODO"
)
if isinstance(right, NegatedExpression): if isinstance(right, NegatedExpression):
result += "-" result += "-"
@ -594,10 +661,18 @@ class FindQuery:
elif not field_info: elif not field_info:
raise QuerySyntaxError("Could not resolve field info. See docs: TODO") raise QuerySyntaxError("Could not resolve field info. See docs: TODO")
elif isinstance(right, ModelField): elif isinstance(right, ModelField):
raise QueryNotSupportedError("Comparing fields is not supported. See docs: TODO") raise QueryNotSupportedError(
"Comparing fields is not supported. See docs: TODO"
)
else: else:
result += cls.resolve_value(field_name, field_type, field_info, result += cls.resolve_value(
expression.op, right, expression.parents) field_name,
field_type,
field_info,
expression.op,
right,
expression.parents,
)
if encompassing_expression_is_negated: if encompassing_expression_is_negated:
result = f"-({result})" result = f"-({result})"
@ -658,7 +733,7 @@ class FindQuery:
return self return self
return self.copy(sort_fields=list(fields)) return self.copy(sort_fields=list(fields))
def update(self, use_transaction=True, **field_values) -> Optional[List[str]]: def update(self, use_transaction=True, **field_values):
""" """
Update models that match this query to the given field-value pairs. Update models that match this query to the given field-value pairs.
@ -672,11 +747,14 @@ class FindQuery:
for model in self.all(): for model in self.all():
for field, value in field_values.items(): for field, value in field_values.items():
setattr(model, field, value) setattr(model, field, value)
# TODO: In the non-transaction case, can we do more to detect
# failure responses from Redis?
model.save(pipeline=pipeline) model.save(pipeline=pipeline)
if pipeline: if pipeline:
# TODO: Better response type, error detection # TODO: Response type?
return pipeline.execute() # TODO: Better error detection for transactions.
pipeline.execute()
def delete(self): def delete(self):
"""Delete all matching records in this query.""" """Delete all matching records in this query."""
@ -720,8 +798,9 @@ class PrimaryKeyCreator(Protocol):
class UlidPrimaryKey: class UlidPrimaryKey:
"""A client-side generated primary key that follows the ULID spec. """A client-side generated primary key that follows the ULID spec.
https://github.com/ulid/javascript#specification https://github.com/ulid/javascript#specification
""" """
@staticmethod @staticmethod
def create_pk(*args, **kwargs) -> str: def create_pk(*args, **kwargs) -> str:
return str(ULID()) return str(ULID())
@ -848,6 +927,7 @@ class DefaultMeta:
TODO: Revisit whether this is really necessary, and whether making TODO: Revisit whether this is really necessary, and whether making
these all optional here is the right choice. these all optional here is the right choice.
""" """
global_key_prefix: Optional[str] = None global_key_prefix: Optional[str] = None
model_key_prefix: Optional[str] = None model_key_prefix: Optional[str] = None
primary_key_pattern: Optional[str] = None primary_key_pattern: Optional[str] = None
@ -863,28 +943,32 @@ class ModelMeta(ModelMetaclass):
_meta: MetaProtocol _meta: MetaProtocol
def __new__(cls, name, bases, attrs, **kwargs): # noqa C901 def __new__(cls, name, bases, attrs, **kwargs): # noqa C901
meta = attrs.pop('Meta', None) meta = attrs.pop("Meta", None)
new_class = super().__new__(cls, name, bases, attrs, **kwargs) new_class = super().__new__(cls, name, bases, attrs, **kwargs)
# The fact that there is a Meta field and _meta field is important: a # The fact that there is a Meta field and _meta field is important: a
# user may have given us a Meta object with their configuration, while # user may have given us a Meta object with their configuration, while
# we might have inherited _meta from a parent class, and should # we might have inherited _meta from a parent class, and should
# therefore use some of the inherited fields. # therefore use some of the inherited fields.
meta = meta or getattr(new_class, 'Meta', None) meta = meta or getattr(new_class, "Meta", None)
base_meta = getattr(new_class, '_meta', None) base_meta = getattr(new_class, "_meta", None)
if meta and meta != DefaultMeta and meta != base_meta: if meta and meta != DefaultMeta and meta != base_meta:
new_class.Meta = meta new_class.Meta = meta
new_class._meta = meta new_class._meta = meta
elif base_meta: elif base_meta:
new_class._meta = type(f'{new_class.__name__}Meta', (base_meta,), dict(base_meta.__dict__)) new_class._meta = type(
f"{new_class.__name__}Meta", (base_meta,), dict(base_meta.__dict__)
)
new_class.Meta = new_class._meta new_class.Meta = new_class._meta
# Unset inherited values we don't want to reuse (typically based on # Unset inherited values we don't want to reuse (typically based on
# the model name). # the model name).
new_class._meta.model_key_prefix = None new_class._meta.model_key_prefix = None
new_class._meta.index_name = None new_class._meta.index_name = None
else: else:
new_class._meta = type(f'{new_class.__name__}Meta', (DefaultMeta,), dict(DefaultMeta.__dict__)) new_class._meta = type(
f"{new_class.__name__}Meta", (DefaultMeta,), dict(DefaultMeta.__dict__)
)
new_class.Meta = new_class._meta new_class.Meta = new_class._meta
# Create proxies for each model field so that we can use the field # Create proxies for each model field so that we can use the field
@ -894,29 +978,40 @@ class ModelMeta(ModelMetaclass):
# Check if this is our FieldInfo version with extended ORM metadata. # Check if this is our FieldInfo version with extended ORM metadata.
if isinstance(field.field_info, FieldInfo): if isinstance(field.field_info, FieldInfo):
if field.field_info.primary_key: if field.field_info.primary_key:
new_class._meta.primary_key = PrimaryKey(name=field_name, field=field) new_class._meta.primary_key = PrimaryKey(
name=field_name, field=field
)
if not getattr(new_class._meta, 'global_key_prefix', None): if not getattr(new_class._meta, "global_key_prefix", None):
new_class._meta.global_key_prefix = getattr(base_meta, "global_key_prefix", "") new_class._meta.global_key_prefix = getattr(
if not getattr(new_class._meta, 'model_key_prefix', None): base_meta, "global_key_prefix", ""
)
if not getattr(new_class._meta, "model_key_prefix", None):
# Don't look at the base class for this. # Don't look at the base class for this.
new_class._meta.model_key_prefix = f"{new_class.__module__}.{new_class.__name__}" new_class._meta.model_key_prefix = (
if not getattr(new_class._meta, 'primary_key_pattern', None): f"{new_class.__module__}.{new_class.__name__}"
new_class._meta.primary_key_pattern = getattr(base_meta, "primary_key_pattern", )
"{pk}") if not getattr(new_class._meta, "primary_key_pattern", None):
if not getattr(new_class._meta, 'database', None): new_class._meta.primary_key_pattern = getattr(
new_class._meta.database = getattr(base_meta, "database", base_meta, "primary_key_pattern", "{pk}"
redis.Redis(decode_responses=True)) )
if not getattr(new_class._meta, 'primary_key_creator_cls', None): if not getattr(new_class._meta, "database", None):
new_class._meta.primary_key_creator_cls = getattr(base_meta, "primary_key_creator_cls", new_class._meta.database = getattr(
UlidPrimaryKey) base_meta, "database", redis.Redis(decode_responses=True)
if not getattr(new_class._meta, 'index_name', None): )
new_class._meta.index_name = f"{new_class._meta.global_key_prefix}:" \ if not getattr(new_class._meta, "primary_key_creator_cls", None):
f"{new_class._meta.model_key_prefix}:index" new_class._meta.primary_key_creator_cls = getattr(
base_meta, "primary_key_creator_cls", UlidPrimaryKey
)
if not getattr(new_class._meta, "index_name", None):
new_class._meta.index_name = (
f"{new_class._meta.global_key_prefix}:"
f"{new_class._meta.model_key_prefix}:index"
)
# Not an abstract model class or embedded model, so we should let the # Not an abstract model class or embedded model, so we should let the
# Migrator create indexes for it. # Migrator create indexes for it.
if abc.ABC not in bases and not getattr(new_class._meta, 'embedded', False): if abc.ABC not in bases and not getattr(new_class._meta, "embedded", False):
key = f"{new_class.__module__}.{new_class.__qualname__}" key = f"{new_class.__module__}.{new_class.__qualname__}"
model_registry[key] = new_class model_registry[key] = new_class
@ -931,7 +1026,7 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
class Config: class Config:
orm_mode = True orm_mode = True
arbitrary_types_allowed = True arbitrary_types_allowed = True
extra = 'allow' extra = "allow"
def __init__(__pydantic_self__, **data: Any) -> None: def __init__(__pydantic_self__, **data: Any) -> None:
super().__init__(**data) super().__init__(**data)
@ -953,7 +1048,7 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
"""Update this model instance with the specified key-value pairs.""" """Update this model instance with the specified key-value pairs."""
raise NotImplementedError raise NotImplementedError
def save(self, *args, **kwargs) -> 'RedisModel': def save(self, pipeline: Optional[Pipeline] = None) -> "RedisModel":
raise NotImplementedError raise NotImplementedError
@validator("pk", always=True) @validator("pk", always=True)
@ -967,7 +1062,7 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
"""Check for a primary key. We need one (and only one).""" """Check for a primary key. We need one (and only one)."""
primary_keys = 0 primary_keys = 0
for name, field in cls.__fields__.items(): for name, field in cls.__fields__.items():
if getattr(field.field_info, 'primary_key', None): if getattr(field.field_info, "primary_key", None):
primary_keys += 1 primary_keys += 1
if primary_keys == 0: if primary_keys == 0:
raise RedisModelError("You must define a primary key for the model") raise RedisModelError("You must define a primary key for the model")
@ -976,8 +1071,8 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
@classmethod @classmethod
def make_key(cls, part: str): def make_key(cls, part: str):
global_prefix = getattr(cls._meta, 'global_key_prefix', '').strip(":") global_prefix = getattr(cls._meta, "global_key_prefix", "").strip(":")
model_prefix = getattr(cls._meta, 'model_key_prefix', '').strip(":") model_prefix = getattr(cls._meta, "model_key_prefix", "").strip(":")
return f"{global_prefix}:{model_prefix}:{part}" return f"{global_prefix}:{model_prefix}:{part}"
@classmethod @classmethod
@ -997,13 +1092,14 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
def from_redis(cls, res: Any): def from_redis(cls, res: Any):
# TODO: Parsing logic copied from redisearch-py. Evaluate. # TODO: Parsing logic copied from redisearch-py. Evaluate.
import six import six
from six.moves import xrange, zip as izip from six.moves import xrange
from six.moves import zip as izip
def to_string(s): def to_string(s):
if isinstance(s, six.string_types): if isinstance(s, six.string_types):
return s return s
elif isinstance(s, six.binary_type): elif isinstance(s, six.binary_type):
return s.decode('utf-8', 'ignore') return s.decode("utf-8", "ignore")
else: else:
return s # Not a string we care about return s # Not a string we care about
@ -1015,23 +1111,27 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
fields_offset = offset fields_offset = offset
fields = dict( fields = dict(
dict(izip(map(to_string, res[i + fields_offset][::2]), dict(
map(to_string, res[i + fields_offset][1::2]))) izip(
map(to_string, res[i + fields_offset][::2]),
map(to_string, res[i + fields_offset][1::2]),
)
)
) )
try: try:
del fields['id'] del fields["id"]
except KeyError: except KeyError:
pass pass
try: try:
fields['json'] = fields['$'] fields["json"] = fields["$"]
del fields['$'] del fields["$"]
except KeyError: except KeyError:
pass pass
if 'json' in fields: if "json" in fields:
json_fields = json.loads(fields['json']) json_fields = json.loads(fields["json"])
doc = cls(**json_fields) doc = cls(**json_fields)
else: else:
doc = cls(**fields) doc = cls(**fields)
@ -1039,7 +1139,7 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
return docs return docs
@classmethod @classmethod
def add(cls, models: Sequence['RedisModel']) -> Sequence['RedisModel']: def add(cls, models: Sequence["RedisModel"]) -> Sequence["RedisModel"]:
# TODO: Add transaction support # TODO: Add transaction support
return [model.save() for model in models] return [model.save() for model in models]
@ -1059,15 +1159,18 @@ class HashModel(RedisModel, abc.ABC):
for name, field in cls.__fields__.items(): for name, field in cls.__fields__.items():
if issubclass(field.outer_type_, RedisModel): if issubclass(field.outer_type_, RedisModel):
raise RedisModelError(f"HashModels cannot have embedded model " raise RedisModelError(
f"fields. Field: {name}") f"HashModels cannot have embedded model " f"fields. Field: {name}"
)
for typ in (Set, Mapping, List): for typ in (Set, Mapping, List):
if issubclass(field.outer_type_, typ): if issubclass(field.outer_type_, typ):
raise RedisModelError(f"HashModels cannot have set, list," raise RedisModelError(
f" or mapping fields. Field: {name}") f"HashModels cannot have set, list,"
f" or mapping fields. Field: {name}"
)
def save(self, pipeline: Optional[Pipeline] = None) -> 'HashModel': def save(self, pipeline: Optional[Pipeline] = None) -> "HashModel":
if pipeline is None: if pipeline is None:
db = self.db() db = self.db()
else: else:
@ -1077,7 +1180,7 @@ class HashModel(RedisModel, abc.ABC):
return self return self
@classmethod @classmethod
def get(cls, pk: Any) -> 'HashModel': def get(cls, pk: Any) -> "HashModel":
document = cls.db().hgetall(cls.make_primary_key(pk)) document = cls.db().hgetall(cls.make_primary_key(pk))
if not document: if not document:
raise NotFoundError raise NotFoundError
@ -1111,13 +1214,17 @@ class HashModel(RedisModel, abc.ABC):
for name, field in cls.__fields__.items(): for name, field in cls.__fields__.items():
# TODO: Merge this code with schema_for_type()? # TODO: Merge this code with schema_for_type()?
_type = field.outer_type_ _type = field.outer_type_
if getattr(field.field_info, 'primary_key', None): if getattr(field.field_info, "primary_key", None):
if issubclass(_type, str): if issubclass(_type, str):
redisearch_field = f"{name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR}" redisearch_field = (
f"{name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR}"
)
else: else:
redisearch_field = cls.schema_for_type(name, _type, field.field_info) redisearch_field = cls.schema_for_type(
name, _type, field.field_info
)
schema_parts.append(redisearch_field) schema_parts.append(redisearch_field)
elif getattr(field.field_info, 'index', None) is True: elif getattr(field.field_info, "index", None) is True:
schema_parts.append(cls.schema_for_type(name, _type, field.field_info)) schema_parts.append(cls.schema_for_type(name, _type, field.field_info))
elif is_supported_container_type(_type): elif is_supported_container_type(_type):
embedded_cls = get_args(_type) embedded_cls = get_args(_type)
@ -1126,8 +1233,9 @@ class HashModel(RedisModel, abc.ABC):
log.warning("Model %s defined an empty list field: %s", cls, name) log.warning("Model %s defined an empty list field: %s", cls, name)
continue continue
embedded_cls = embedded_cls[0] embedded_cls = embedded_cls[0]
schema_parts.append(cls.schema_for_type(name, embedded_cls, schema_parts.append(
field.field_info)) cls.schema_for_type(name, embedded_cls, field.field_info)
)
elif issubclass(_type, RedisModel): elif issubclass(_type, RedisModel):
schema_parts.append(cls.schema_for_type(name, _type, field.field_info)) schema_parts.append(cls.schema_for_type(name, _type, field.field_info))
return schema_parts return schema_parts
@ -1141,29 +1249,36 @@ class HashModel(RedisModel, abc.ABC):
# as sortable. # as sortable.
# TODO: Abstract string-building logic for each type (TAG, etc.) into # TODO: Abstract string-building logic for each type (TAG, etc.) into
# classes that take a field name. # classes that take a field name.
sortable = getattr(field_info, 'sortable', False) sortable = getattr(field_info, "sortable", False)
if is_supported_container_type(typ): if is_supported_container_type(typ):
embedded_cls = get_args(typ) embedded_cls = get_args(typ)
if not embedded_cls: if not embedded_cls:
# TODO: Test if this can really happen. # TODO: Test if this can really happen.
log.warning("Model %s defined an empty list or tuple field: %s", cls, name) log.warning(
"Model %s defined an empty list or tuple field: %s", cls, name
)
return "" return ""
embedded_cls = embedded_cls[0] embedded_cls = embedded_cls[0]
schema = cls.schema_for_type(name, embedded_cls, field_info) schema = cls.schema_for_type(name, embedded_cls, field_info)
elif any(issubclass(typ, t) for t in NUMERIC_TYPES): elif any(issubclass(typ, t) for t in NUMERIC_TYPES):
schema = f"{name} NUMERIC" schema = f"{name} NUMERIC"
elif issubclass(typ, str): elif issubclass(typ, str):
if getattr(field_info, 'full_text_search', False) is True: if getattr(field_info, "full_text_search", False) is True:
schema = f"{name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR} " \ schema = (
f"{name}_fts TEXT" f"{name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR} "
f"{name}_fts TEXT"
)
else: else:
schema = f"{name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR}" schema = f"{name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR}"
elif issubclass(typ, RedisModel): elif issubclass(typ, RedisModel):
sub_fields = [] sub_fields = []
for embedded_name, field in typ.__fields__.items(): for embedded_name, field in typ.__fields__.items():
sub_fields.append(cls.schema_for_type(f"{name}_{embedded_name}", field.outer_type_, sub_fields.append(
field.field_info)) cls.schema_for_type(
f"{name}_{embedded_name}", field.outer_type_, field.field_info
)
)
schema = " ".join(sub_fields) schema = " ".join(sub_fields)
else: else:
schema = f"{name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR}" schema = f"{name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR}"
@ -1177,12 +1292,12 @@ class JsonModel(RedisModel, abc.ABC):
# Generate the RediSearch schema once to validate fields. # Generate the RediSearch schema once to validate fields.
cls.redisearch_schema() cls.redisearch_schema()
def save(self, pipeline: Optional[Pipeline] = None) -> 'JsonModel': def save(self, pipeline: Optional[Pipeline] = None) -> "JsonModel":
if pipeline is None: if pipeline is None:
db = self.db() db = self.db()
else: else:
db = pipeline db = pipeline
db.execute_command('JSON.SET', self.key(), ".", self.json()) db.execute_command("JSON.SET", self.key(), ".", self.json())
return self return self
def update(self, **field_values): def update(self, **field_values):
@ -1192,7 +1307,7 @@ class JsonModel(RedisModel, abc.ABC):
self.save() self.save()
@classmethod @classmethod
def get(cls, pk: Any) -> 'JsonModel': def get(cls, pk: Any) -> "JsonModel":
document = cls.db().execute_command("JSON.GET", cls.make_primary_key(pk)) document = cls.db().execute_command("JSON.GET", cls.make_primary_key(pk))
if not document: if not document:
raise NotFoundError raise NotFoundError
@ -1212,21 +1327,31 @@ class JsonModel(RedisModel, abc.ABC):
for name, field in cls.__fields__.items(): for name, field in cls.__fields__.items():
_type = field.outer_type_ _type = field.outer_type_
schema_parts.append(cls.schema_for_type( schema_parts.append(
json_path, name, "", _type, field.field_info)) cls.schema_for_type(json_path, name, "", _type, field.field_info)
)
return schema_parts return schema_parts
@classmethod @classmethod
def schema_for_type(cls, json_path: str, name: str, name_prefix: str, typ: Any, def schema_for_type(
field_info: PydanticFieldInfo, cls,
parent_type: Optional[Any] = None) -> str: json_path: str,
should_index = getattr(field_info, 'index', False) name: str,
name_prefix: str,
typ: Any,
field_info: PydanticFieldInfo,
parent_type: Optional[Any] = None,
) -> str:
should_index = getattr(field_info, "index", False)
is_container_type = is_supported_container_type(typ) is_container_type = is_supported_container_type(typ)
parent_is_container_type = is_supported_container_type(parent_type) parent_is_container_type = is_supported_container_type(parent_type)
try: parent_is_model = False
parent_is_model = issubclass(parent_type, RedisModel)
except TypeError: if parent_type:
parent_is_model = False try:
parent_is_model = issubclass(parent_type, RedisModel)
except TypeError:
pass
# TODO: We need a better way to know that we're indexing a value # TODO: We need a better way to know that we're indexing a value
# discovered in a model within an array. # discovered in a model within an array.
@ -1253,11 +1378,19 @@ class JsonModel(RedisModel, abc.ABC):
field_type = get_origin(typ) field_type = get_origin(typ)
embedded_cls = get_args(typ) embedded_cls = get_args(typ)
if not embedded_cls: if not embedded_cls:
log.warning("Model %s defined an empty list or tuple field: %s", cls, name) log.warning(
"Model %s defined an empty list or tuple field: %s", cls, name
)
return "" return ""
embedded_cls = embedded_cls[0] embedded_cls = embedded_cls[0]
return cls.schema_for_type(f"{json_path}.{name}[*]", name, name_prefix, return cls.schema_for_type(
embedded_cls, field_info, parent_type=field_type) f"{json_path}.{name}[*]",
name,
name_prefix,
embedded_cls,
field_info,
parent_type=field_type,
)
elif field_is_model: elif field_is_model:
name_prefix = f"{name_prefix}_{name}" if name_prefix else name name_prefix = f"{name_prefix}_{name}" if name_prefix else name
sub_fields = [] sub_fields = []
@ -1273,12 +1406,16 @@ class JsonModel(RedisModel, abc.ABC):
# current field name and "embedded" field name, e.g., # current field name and "embedded" field name, e.g.,
# order.address.street_line_1. # order.address.street_line_1.
path = f"{json_path}.{name}" path = f"{json_path}.{name}"
sub_fields.append(cls.schema_for_type(path, sub_fields.append(
embedded_name, cls.schema_for_type(
name_prefix, path,
field.outer_type_, embedded_name,
field.field_info, name_prefix,
parent_type=typ)) field.outer_type_,
field.field_info,
parent_type=typ,
)
)
return " ".join(filter(None, sub_fields)) return " ".join(filter(None, sub_fields))
# NOTE: This is the termination point for recursion. We've descended # NOTE: This is the termination point for recursion. We've descended
# into models and lists until we found an actual value to index. # into models and lists until we found an actual value to index.
@ -1291,20 +1428,26 @@ class JsonModel(RedisModel, abc.ABC):
path = json_path path = json_path
else: else:
path = f"{json_path}.{name}" path = f"{json_path}.{name}"
sortable = getattr(field_info, 'sortable', False) sortable = getattr(field_info, "sortable", False)
full_text_search = getattr(field_info, 'full_text_search', False) full_text_search = getattr(field_info, "full_text_search", False)
sortable_tag_error = RedisModelError("In this Preview release, TAG fields cannot " sortable_tag_error = RedisModelError(
f"be marked as sortable. Problem field: {name}. " "In this Preview release, TAG fields cannot "
"See docs: TODO") f"be marked as sortable. Problem field: {name}. "
"See docs: TODO"
)
# TODO: GEO field # TODO: GEO field
if parent_is_container_type or parent_is_model_in_container: if parent_is_container_type or parent_is_model_in_container:
if typ is not str: if typ is not str:
raise RedisModelError("In this Preview release, list and tuple fields can only " raise RedisModelError(
f"contain strings. Problem field: {name}. See docs: TODO") "In this Preview release, list and tuple fields can only "
f"contain strings. Problem field: {name}. See docs: TODO"
)
if full_text_search is True: if full_text_search is True:
raise RedisModelError("List and tuple fields cannot be indexed for full-text " raise RedisModelError(
f"search. Problem field: {name}. See docs: TODO") "List and tuple fields cannot be indexed for full-text "
f"search. Problem field: {name}. See docs: TODO"
)
schema = f"{path} AS {index_field_name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR}" schema = f"{path} AS {index_field_name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR}"
if sortable is True: if sortable is True:
raise sortable_tag_error raise sortable_tag_error
@ -1312,8 +1455,10 @@ class JsonModel(RedisModel, abc.ABC):
schema = f"{path} AS {index_field_name} NUMERIC" schema = f"{path} AS {index_field_name} NUMERIC"
elif issubclass(typ, str): elif issubclass(typ, str):
if full_text_search is True: if full_text_search is True:
schema = f"{path} AS {index_field_name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR} " \ schema = (
f"{path} AS {index_field_name}_fts TEXT" f"{path} AS {index_field_name} TAG SEPARATOR {SINGLE_VALUE_TAG_FIELD_SEPARATOR} "
f"{path} AS {index_field_name}_fts TEXT"
)
if sortable is True: if sortable is True:
# NOTE: With the current preview release, making a field # NOTE: With the current preview release, making a field
# full-text searchable and sortable only makes the TEXT # full-text searchable and sortable only makes the TEXT

View file

@ -1,7 +1,7 @@
import abc import abc
from typing import Optional from typing import Optional
from redis_developer.model.model import JsonModel, HashModel from redis_developer.model.model import HashModel, JsonModel
class BaseJsonModel(JsonModel, abc.ABC): class BaseJsonModel(JsonModel, abc.ABC):
@ -20,7 +20,8 @@ class BaseHashModel(HashModel, abc.ABC):
# city: str # city: str
# country: str # country: str
# postal_code: str # postal_code: str
# #
class AddressHash(BaseHashModel): class AddressHash(BaseHashModel):
address_line_1: str address_line_1: str

View file

@ -1,5 +1,5 @@
from collections import Sequence from collections import Sequence
from typing import Any, Dict, Mapping, Union, List from typing import Any, Dict, List, Mapping, Union
from redis_developer.model.model import Expression from redis_developer.model.model import Expression
@ -91,6 +91,7 @@ class Not(LogicalOperatorForListOfExpressions):
-(@price:[-inf 10]) -(@category:{Sweets}) -(@price:[-inf 10]) -(@category:{Sweets})
``` ```
""" """
@property @property
def query(self): def query(self):
return "-(expression1) -(expression2)" return "-(expression1) -(expression2)"
@ -102,5 +103,3 @@ class QueryResolver:
def resolve(self) -> str: def resolve(self) -> str:
"""Resolve expressions to a RediSearch query string.""" """Resolve expressions to a RediSearch query string."""

View file

@ -5,9 +5,15 @@ and released under the MIT license: https://github.com/clemtoy/pptree
import io import io
def render_tree(current_node, nameattr='name', left_child='left', def render_tree(
right_child='right', indent='', last='updown', current_node,
buffer=None): nameattr="name",
left_child="left",
right_child="right",
indent="",
last="updown",
buffer=None,
):
"""Print a tree-like structure, `current_node`. """Print a tree-like structure, `current_node`.
This is a mostly-direct-copy of the print_tree() function from the ppbtree This is a mostly-direct-copy of the print_tree() function from the ppbtree
@ -18,42 +24,52 @@ def render_tree(current_node, nameattr='name', left_child='left',
if buffer is None: if buffer is None:
buffer = io.StringIO() buffer = io.StringIO()
if hasattr(current_node, nameattr): if hasattr(current_node, nameattr):
name = lambda node: getattr(node, nameattr) name = lambda node: getattr(node, nameattr) # noqa: E731
else: else:
name = lambda node: str(node) name = lambda node: str(node) # noqa: E731
up = getattr(current_node, left_child, None) up = getattr(current_node, left_child, None)
down = getattr(current_node, right_child, None) down = getattr(current_node, right_child, None)
if up is not None: if up is not None:
next_last = 'up' next_last = "up"
next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in last else '|', ' ' * len(str(name(current_node)))) next_indent = "{0}{1}{2}".format(
render_tree(up, nameattr, left_child, right_child, next_indent, next_last, buffer) indent, " " if "up" in last else "|", " " * len(str(name(current_node)))
)
render_tree(
up, nameattr, left_child, right_child, next_indent, next_last, buffer
)
if last == 'up': if last == "up":
start_shape = '' start_shape = ""
elif last == 'down': elif last == "down":
start_shape = '' start_shape = ""
elif last == 'updown': elif last == "updown":
start_shape = ' ' start_shape = " "
else: else:
start_shape = '' start_shape = ""
if up is not None and down is not None: if up is not None and down is not None:
end_shape = '' end_shape = ""
elif up: elif up:
end_shape = '' end_shape = ""
elif down: elif down:
end_shape = '' end_shape = ""
else: else:
end_shape = '' end_shape = ""
print('{0}{1}{2}{3}'.format(indent, start_shape, name(current_node), end_shape), print(
file=buffer) "{0}{1}{2}{3}".format(indent, start_shape, name(current_node), end_shape),
file=buffer,
)
if down is not None: if down is not None:
next_last = 'down' next_last = "down"
next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in last else '|', ' ' * len(str(name(current_node)))) next_indent = "{0}{1}{2}".format(
render_tree(down, nameattr, left_child, right_child, next_indent, next_last, buffer) indent, " " if "down" in last else "|", " " * len(str(name(current_node)))
)
render_tree(
down, nameattr, left_child, right_child, next_indent, next_last, buffer
)
return f"\n{buffer.getvalue()}" return f"\n{buffer.getvalue()}"

View file

@ -6,6 +6,7 @@ class TokenEscaper:
""" """
Escape punctuation within an input string. Escape punctuation within an input string.
""" """
# Characters that RediSearch requires us to escape during queries. # Characters that RediSearch requires us to escape during queries.
# Source: https://oss.redis.com/redisearch/Escaping/#the_rules_of_text_field_tokenization # Source: https://oss.redis.com/redisearch/Escaping/#the_rules_of_text_field_tokenization
DEFAULT_ESCAPED_CHARS = r"[,.<>{}\[\]\\\"\':;!@#$%^&*()\-+=~\ ]" DEFAULT_ESCAPED_CHARS = r"[,.<>{}\[\]\\\"\':;!@#$%^&*()\-+=~\ ]"

View file

@ -1,6 +1,6 @@
import abc import abc
import decimal
import datetime import datetime
import decimal
from typing import Optional from typing import Optional
from unittest import mock from unittest import mock
@ -8,11 +8,13 @@ import pytest
import redis import redis
from pydantic import ValidationError from pydantic import ValidationError
from redis_developer.model import ( from redis_developer.model import Field, HashModel
HashModel, from redis_developer.model.model import (
Field, NotFoundError,
QueryNotSupportedError,
RedisModelError,
) )
from redis_developer.model.model import RedisModelError, QueryNotSupportedError, NotFoundError
r = redis.Redis() r = redis.Redis()
today = datetime.date.today() today = datetime.date.today()
@ -48,7 +50,7 @@ def members():
last_name="Brookins", last_name="Brookins",
email="a@example.com", email="a@example.com",
age=38, age=38,
join_date=today join_date=today,
) )
member2 = Member( member2 = Member(
@ -56,7 +58,7 @@ def members():
last_name="Brookins", last_name="Brookins",
email="k@example.com", email="k@example.com",
age=34, age=34,
join_date=today join_date=today,
) )
member3 = Member( member3 = Member(
@ -64,7 +66,7 @@ def members():
last_name="Smith", last_name="Smith",
email="as@example.com", email="as@example.com",
age=100, age=100,
join_date=today join_date=today,
) )
member1.save() member1.save()
member2.save() member2.save()
@ -76,21 +78,13 @@ def members():
def test_validates_required_fields(): def test_validates_required_fields():
# Raises ValidationError: last_name is required # Raises ValidationError: last_name is required
with pytest.raises(ValidationError): with pytest.raises(ValidationError):
Member( Member(first_name="Andrew", zipcode="97086", join_date=today)
first_name="Andrew",
zipcode="97086",
join_date=today
)
def test_validates_field(): def test_validates_field():
# Raises ValidationError: join_date is not a date # Raises ValidationError: join_date is not a date
with pytest.raises(ValidationError): with pytest.raises(ValidationError):
Member( Member(first_name="Andrew", last_name="Brookins", join_date="yesterday")
first_name="Andrew",
last_name="Brookins",
join_date="yesterday"
)
# Passes validation # Passes validation
@ -100,7 +94,7 @@ def test_validation_passes():
last_name="Brookins", last_name="Brookins",
email="a@example.com", email="a@example.com",
join_date=today, join_date=today,
age=38 age=38,
) )
assert member.first_name == "Andrew" assert member.first_name == "Andrew"
@ -111,7 +105,7 @@ def test_saves_model_and_creates_pk():
last_name="Brookins", last_name="Brookins",
email="a@example.com", email="a@example.com",
join_date=today, join_date=today,
age=38 age=38,
) )
# Save a model instance to Redis # Save a model instance to Redis
member.save() member.save()
@ -129,6 +123,7 @@ def test_raises_error_with_embedded_models():
postal_code: str postal_code: str
with pytest.raises(RedisModelError): with pytest.raises(RedisModelError):
class InvalidMember(BaseHashModel): class InvalidMember(BaseHashModel):
address: Address address: Address
@ -140,14 +135,14 @@ def test_saves_many():
first_name="Andrew", first_name="Andrew",
last_name="Brookins", last_name="Brookins",
email="a@example.com", email="a@example.com",
join_date=today join_date=today,
), ),
Member( Member(
first_name="Kim", first_name="Kim",
last_name="Brookins", last_name="Brookins",
email="k@example.com", email="k@example.com",
join_date=today join_date=today,
) ),
] ]
Member.add(members) Member.add(members)
@ -174,21 +169,21 @@ def test_paginate_query(members):
def test_access_result_by_index_cached(members): def test_access_result_by_index_cached(members):
member1, member2, member3 = members member1, member2, member3 = members
query = Member.find().sort_by('age') query = Member.find().sort_by("age")
# Load the cache, throw away the result. # Load the cache, throw away the result.
assert query._model_cache == [] assert query._model_cache == []
query.execute() query.execute()
assert query._model_cache == [member2, member1, member3] assert query._model_cache == [member2, member1, member3]
# Access an item that should be in the cache. # Access an item that should be in the cache.
with mock.patch.object(query.model, 'db') as mock_db: with mock.patch.object(query.model, "db") as mock_db:
assert query[0] == member2 assert query[0] == member2
assert not mock_db.called assert not mock_db.called
def test_access_result_by_index_not_cached(members): def test_access_result_by_index_not_cached(members):
member1, member2, member3 = members member1, member2, member3 = members
query = Member.find().sort_by('age') query = Member.find().sort_by("age")
# Assert that we don't have any models in the cache yet -- we # Assert that we don't have any models in the cache yet -- we
# haven't made any requests of Redis. # haven't made any requests of Redis.
@ -205,7 +200,8 @@ def test_exact_match_queries(members):
assert actual == [member1, member2] assert actual == [member1, member2]
actual = Member.find( actual = Member.find(
(Member.last_name == "Brookins") & ~(Member.first_name == "Andrew")).all() (Member.last_name == "Brookins") & ~(Member.first_name == "Andrew")
).all()
assert actual == [member2] assert actual == [member2]
actual = Member.find(~(Member.last_name == "Brookins")).all() actual = Member.find(~(Member.last_name == "Brookins")).all()
@ -220,16 +216,19 @@ def test_exact_match_queries(members):
).all() ).all()
assert actual == [member1, member2] assert actual == [member1, member2]
actual = Member.find(Member.first_name == "Kim", Member.last_name == "Brookins").all() actual = Member.find(
Member.first_name == "Kim", Member.last_name == "Brookins"
).all()
assert actual == [member2] assert actual == [member2]
def test_recursive_query_resolution(members): def test_recursive_query_resolution(members):
member1, member2, member3 = members member1, member2, member3 = members
actual = Member.find((Member.last_name == "Brookins") | ( actual = Member.find(
Member.age == 100 (Member.last_name == "Brookins")
) & (Member.last_name == "Smith")).all() | (Member.age == 100) & (Member.last_name == "Smith")
).all()
assert actual == [member1, member2, member3] assert actual == [member1, member2, member3]
@ -237,8 +236,9 @@ def test_tag_queries_boolean_logic(members):
member1, member2, member3 = members member1, member2, member3 = members
actual = Member.find( actual = Member.find(
(Member.first_name == "Andrew") & (Member.first_name == "Andrew") & (Member.last_name == "Brookins")
(Member.last_name == "Brookins") | (Member.last_name == "Smith")).all() | (Member.last_name == "Smith")
).all()
assert actual == [member1, member3] assert actual == [member1, member3]
@ -281,9 +281,7 @@ def test_tag_queries_negation(members):
Andrew Andrew
""" """
query = Member.find( query = Member.find(~(Member.first_name == "Andrew"))
~(Member.first_name == "Andrew")
)
assert query.all() == [member2] assert query.all() == [member2]
""" """
@ -315,8 +313,9 @@ def test_tag_queries_negation(members):
Smith Smith
""" """
query = Member.find( query = Member.find(
~(Member.first_name == "Andrew") & ~(Member.first_name == "Andrew")
((Member.last_name == "Brookins") | (Member.last_name == "Smith"))) & ((Member.last_name == "Brookins") | (Member.last_name == "Smith"))
)
assert query.all() == [member2] assert query.all() == [member2]
""" """
@ -333,12 +332,14 @@ def test_tag_queries_negation(members):
Smith Smith
""" """
query = Member.find( query = Member.find(
~(Member.first_name == "Andrew") & ~(Member.first_name == "Andrew") & (Member.last_name == "Brookins")
(Member.last_name == "Brookins") | (Member.last_name == "Smith")) | (Member.last_name == "Smith")
)
assert query.all() == [member2, member3] assert query.all() == [member2, member3]
actual = Member.find( actual = Member.find(
(Member.first_name == "Andrew") & ~(Member.last_name == "Brookins")).all() (Member.first_name == "Andrew") & ~(Member.last_name == "Brookins")
).all()
assert actual == [member3] assert actual == [member3]
@ -373,19 +374,19 @@ def test_numeric_queries(members):
def test_sorting(members): def test_sorting(members):
member1, member2, member3 = members member1, member2, member3 = members
actual = Member.find(Member.age > 34).sort_by('age').all() actual = Member.find(Member.age > 34).sort_by("age").all()
assert actual == [member1, member3] assert actual == [member1, member3]
actual = Member.find(Member.age > 34).sort_by('-age').all() actual = Member.find(Member.age > 34).sort_by("-age").all()
assert actual == [member3, member1] assert actual == [member3, member1]
with pytest.raises(QueryNotSupportedError): with pytest.raises(QueryNotSupportedError):
# This field does not exist. # This field does not exist.
Member.find().sort_by('not-a-real-field').all() Member.find().sort_by("not-a-real-field").all()
with pytest.raises(QueryNotSupportedError): with pytest.raises(QueryNotSupportedError):
# This field is not sortable. # This field is not sortable.
Member.find().sort_by('join_date').all() Member.find().sort_by("join_date").all()
def test_not_found(): def test_not_found():
@ -403,4 +404,7 @@ def test_schema():
another_integer: int another_integer: int
another_float: float another_float: float
assert Address.redisearch_schema() == "ON HASH PREFIX 1 redis-developer:tests.test_hash_model.Address: SCHEMA pk TAG SEPARATOR | a_string TAG SEPARATOR | a_full_text_string TAG SEPARATOR | a_full_text_string_fts TEXT an_integer NUMERIC SORTABLE a_float NUMERIC" assert (
Address.redisearch_schema()
== "ON HASH PREFIX 1 redis-developer:tests.test_hash_model.Address: SCHEMA pk TAG SEPARATOR | a_string TAG SEPARATOR | a_full_text_string TAG SEPARATOR | a_full_text_string_fts TEXT an_integer NUMERIC SORTABLE a_float NUMERIC"
)

View file

@ -1,20 +1,21 @@
import abc import abc
import decimal
import datetime import datetime
from typing import Optional, List import decimal
from typing import List, Optional
from unittest import mock from unittest import mock
import pytest import pytest
import redis import redis
from pydantic import ValidationError from pydantic import ValidationError
from redis_developer.model import ( from redis_developer.model import EmbeddedJsonModel, Field, JsonModel
EmbeddedJsonModel,
JsonModel,
Field,
)
from redis_developer.model.migrations.migrator import Migrator from redis_developer.model.migrations.migrator import Migrator
from redis_developer.model.model import QueryNotSupportedError, NotFoundError, RedisModelError from redis_developer.model.model import (
NotFoundError,
QueryNotSupportedError,
RedisModelError,
)
r = redis.Redis() r = redis.Redis()
today = datetime.date.today() today = datetime.date.today()
@ -75,7 +76,7 @@ def address():
city="Portland", city="Portland",
state="OR", state="OR",
country="USA", country="USA",
postal_code=11111 postal_code=11111,
) )
@ -87,7 +88,7 @@ def members(address):
email="a@example.com", email="a@example.com",
age=38, age=38,
join_date=today, join_date=today,
address=address address=address,
) )
member2 = Member( member2 = Member(
@ -96,7 +97,7 @@ def members(address):
email="k@example.com", email="k@example.com",
age=34, age=34,
join_date=today, join_date=today,
address=address address=address,
) )
member3 = Member( member3 = Member(
@ -105,7 +106,7 @@ def members(address):
email="as@example.com", email="as@example.com",
age=100, age=100,
join_date=today, join_date=today,
address=address address=address,
) )
member1.save() member1.save()
@ -133,7 +134,7 @@ def test_validates_field(address):
first_name="Andrew", first_name="Andrew",
last_name="Brookins", last_name="Brookins",
join_date="yesterday", join_date="yesterday",
address=address address=address,
) )
@ -145,7 +146,7 @@ def test_validation_passes(address):
email="a@example.com", email="a@example.com",
join_date=today, join_date=today,
age=38, age=38,
address=address address=address,
) )
assert member.first_name == "Andrew" assert member.first_name == "Andrew"
@ -157,7 +158,7 @@ def test_saves_model_and_creates_pk(address):
email="a@example.com", email="a@example.com",
join_date=today, join_date=today,
age=38, age=38,
address=address address=address,
) )
# Save a model instance to Redis # Save a model instance to Redis
member.save() member.save()
@ -176,7 +177,7 @@ def test_saves_many(address):
email="a@example.com", email="a@example.com",
join_date=today, join_date=today,
address=address, address=address,
age=38 age=38,
), ),
Member( Member(
first_name="Kim", first_name="Kim",
@ -184,8 +185,8 @@ def test_saves_many(address):
email="k@example.com", email="k@example.com",
join_date=today, join_date=today,
address=address, address=address,
age=34 age=34,
) ),
] ]
Member.add(members) Member.add(members)
@ -216,21 +217,21 @@ def test_paginate_query(members):
def test_access_result_by_index_cached(members): def test_access_result_by_index_cached(members):
member1, member2, member3 = members member1, member2, member3 = members
query = Member.find().sort_by('age') query = Member.find().sort_by("age")
# Load the cache, throw away the result. # Load the cache, throw away the result.
assert query._model_cache == [] assert query._model_cache == []
query.execute() query.execute()
assert query._model_cache == [member2, member1, member3] assert query._model_cache == [member2, member1, member3]
# Access an item that should be in the cache. # Access an item that should be in the cache.
with mock.patch.object(query.model, 'db') as mock_db: with mock.patch.object(query.model, "db") as mock_db:
assert query[0] == member2 assert query[0] == member2
assert not mock_db.called assert not mock_db.called
def test_access_result_by_index_not_cached(members): def test_access_result_by_index_not_cached(members):
member1, member2, member3 = members member1, member2, member3 = members
query = Member.find().sort_by('age') query = Member.find().sort_by("age")
# Assert that we don't have any models in the cache yet -- we # Assert that we don't have any models in the cache yet -- we
# haven't made any requests of Redis. # haven't made any requests of Redis.
@ -252,8 +253,11 @@ def test_update_query(members):
Member.find(Member.pk << [member1.pk, member2.pk, member3.pk]).update( Member.find(Member.pk << [member1.pk, member2.pk, member3.pk]).update(
first_name="Bobby" first_name="Bobby"
) )
actual = Member.find( actual = (
Member.pk << [member1.pk, member2.pk, member3.pk]).sort_by('age').all() Member.find(Member.pk << [member1.pk, member2.pk, member3.pk])
.sort_by("age")
.all()
)
assert actual == [member1, member2, member3] assert actual == [member1, member2, member3]
assert all([m.name == "Bobby" for m in actual]) assert all([m.name == "Bobby" for m in actual])
@ -263,24 +267,27 @@ def test_exact_match_queries(members):
actual = Member.find(Member.last_name == "Brookins").all() actual = Member.find(Member.last_name == "Brookins").all()
assert actual == [member1, member2] assert actual == [member1, member2]
actual = Member.find( actual = Member.find(
(Member.last_name == "Brookins") & ~(Member.first_name == "Andrew")).all() (Member.last_name == "Brookins") & ~(Member.first_name == "Andrew")
).all()
assert actual == [member2] assert actual == [member2]
actual = Member.find(~(Member.last_name == "Brookins")).all() actual = Member.find(~(Member.last_name == "Brookins")).all()
assert actual == [member3] assert actual == [member3]
actual = Member.find(Member.last_name != "Brookins").all() actual = Member.find(Member.last_name != "Brookins").all()
assert actual == [member3] assert actual == [member3]
actual = Member.find( actual = Member.find(
(Member.last_name == "Brookins") & (Member.first_name == "Andrew") (Member.last_name == "Brookins") & (Member.first_name == "Andrew")
| (Member.first_name == "Kim") | (Member.first_name == "Kim")
).all() ).all()
assert actual == [member1, member2] assert actual == [member1, member2]
actual = Member.find(Member.first_name == "Kim", Member.last_name == "Brookins").all() actual = Member.find(
Member.first_name == "Kim", Member.last_name == "Brookins"
).all()
assert actual == [member2] assert actual == [member2]
actual = Member.find(Member.address.city == "Portland").all() actual = Member.find(Member.address.city == "Portland").all()
@ -290,24 +297,28 @@ def test_exact_match_queries(members):
def test_recursive_query_expression_resolution(members): def test_recursive_query_expression_resolution(members):
member1, member2, member3 = members member1, member2, member3 = members
actual = Member.find((Member.last_name == "Brookins") | ( actual = Member.find(
Member.age == 100 (Member.last_name == "Brookins")
) & (Member.last_name == "Smith")).all() | (Member.age == 100) & (Member.last_name == "Smith")
).all()
assert actual == [member1, member2, member3] assert actual == [member1, member2, member3]
def test_recursive_query_field_resolution(members): def test_recursive_query_field_resolution(members):
member1, _, _ = members member1, _, _ = members
member1.address.note = Note(description="Weird house", member1.address.note = Note(
created_on=datetime.datetime.now()) description="Weird house", created_on=datetime.datetime.now()
)
member1.save() member1.save()
actual = Member.find(Member.address.note.description == "Weird house").all() actual = Member.find(Member.address.note.description == "Weird house").all()
assert actual == [member1] assert actual == [member1]
member1.orders = [ member1.orders = [
Order(items=[Item(price=10.99, name="Ball")], Order(
total=10.99, items=[Item(price=10.99, name="Ball")],
created_on=datetime.datetime.now()) total=10.99,
created_on=datetime.datetime.now(),
)
] ]
member1.save() member1.save()
actual = Member.find(Member.orders.items.name == "Ball").all() actual = Member.find(Member.orders.items.name == "Ball").all()
@ -331,8 +342,9 @@ def test_tag_queries_boolean_logic(members):
member1, member2, member3 = members member1, member2, member3 = members
actual = Member.find( actual = Member.find(
(Member.first_name == "Andrew") & (Member.first_name == "Andrew") & (Member.last_name == "Brookins")
(Member.last_name == "Brookins") | (Member.last_name == "Smith")).all() | (Member.last_name == "Smith")
).all()
assert actual == [member1, member3] assert actual == [member1, member3]
@ -343,7 +355,7 @@ def test_tag_queries_punctuation(address):
email="a|b@example.com", # NOTE: This string uses the TAG field separator. email="a|b@example.com", # NOTE: This string uses the TAG field separator.
age=38, age=38,
join_date=today, join_date=today,
address=address address=address,
) )
member1.save() member1.save()
@ -353,7 +365,7 @@ def test_tag_queries_punctuation(address):
email="a|villain@example.com", # NOTE: This string uses the TAG field separator. email="a|villain@example.com", # NOTE: This string uses the TAG field separator.
age=38, age=38,
join_date=today, join_date=today,
address=address address=address,
) )
member2.save() member2.save()
@ -377,9 +389,7 @@ def test_tag_queries_negation(members):
Andrew Andrew
""" """
query = Member.find( query = Member.find(~(Member.first_name == "Andrew"))
~(Member.first_name == "Andrew")
)
assert query.all() == [member2] assert query.all() == [member2]
""" """
@ -411,8 +421,9 @@ def test_tag_queries_negation(members):
Smith Smith
""" """
query = Member.find( query = Member.find(
~(Member.first_name == "Andrew") & ~(Member.first_name == "Andrew")
((Member.last_name == "Brookins") | (Member.last_name == "Smith"))) & ((Member.last_name == "Brookins") | (Member.last_name == "Smith"))
)
assert query.all() == [member2] assert query.all() == [member2]
""" """
@ -429,12 +440,14 @@ def test_tag_queries_negation(members):
Smith Smith
""" """
query = Member.find( query = Member.find(
~(Member.first_name == "Andrew") & ~(Member.first_name == "Andrew") & (Member.last_name == "Brookins")
(Member.last_name == "Brookins") | (Member.last_name == "Smith")) | (Member.last_name == "Smith")
)
assert query.all() == [member2, member3] assert query.all() == [member2, member3]
actual = Member.find( actual = Member.find(
(Member.first_name == "Andrew") & ~(Member.last_name == "Brookins")).all() (Member.first_name == "Andrew") & ~(Member.last_name == "Brookins")
).all()
assert actual == [member3] assert actual == [member3]
@ -469,19 +482,19 @@ def test_numeric_queries(members):
def test_sorting(members): def test_sorting(members):
member1, member2, member3 = members member1, member2, member3 = members
actual = Member.find(Member.age > 34).sort_by('age').all() actual = Member.find(Member.age > 34).sort_by("age").all()
assert actual == [member1, member3] assert actual == [member1, member3]
actual = Member.find(Member.age > 34).sort_by('-age').all() actual = Member.find(Member.age > 34).sort_by("-age").all()
assert actual == [member3, member1] assert actual == [member3, member1]
with pytest.raises(QueryNotSupportedError): with pytest.raises(QueryNotSupportedError):
# This field does not exist. # This field does not exist.
Member.find().sort_by('not-a-real-field').all() Member.find().sort_by("not-a-real-field").all()
with pytest.raises(QueryNotSupportedError): with pytest.raises(QueryNotSupportedError):
# This field is not sortable. # This field is not sortable.
Member.find().sort_by('join_date').all() Member.find().sort_by("join_date").all()
def test_not_found(): def test_not_found():
@ -492,24 +505,28 @@ def test_not_found():
def test_list_field_limitations(): def test_list_field_limitations():
with pytest.raises(RedisModelError): with pytest.raises(RedisModelError):
class SortableTarotWitch(BaseJsonModel): class SortableTarotWitch(BaseJsonModel):
# We support indexing lists of strings for quality and membership # We support indexing lists of strings for quality and membership
# queries. Sorting is not supported, but is planned. # queries. Sorting is not supported, but is planned.
tarot_cards: List[str] = Field(index=True, sortable=True) tarot_cards: List[str] = Field(index=True, sortable=True)
with pytest.raises(RedisModelError): with pytest.raises(RedisModelError):
class SortableFullTextSearchAlchemicalWitch(BaseJsonModel): class SortableFullTextSearchAlchemicalWitch(BaseJsonModel):
# We don't support indexing a list of strings for full-text search # We don't support indexing a list of strings for full-text search
# queries. Support for this feature is not planned. # queries. Support for this feature is not planned.
potions: List[str] = Field(index=True, full_text_search=True) potions: List[str] = Field(index=True, full_text_search=True)
with pytest.raises(RedisModelError): with pytest.raises(RedisModelError):
class NumerologyWitch(BaseJsonModel): class NumerologyWitch(BaseJsonModel):
# We don't support indexing a list of numbers. Support for this # We don't support indexing a list of numbers. Support for this
# feature is To Be Determined. # feature is To Be Determined.
lucky_numbers: List[int] = Field(index=True) lucky_numbers: List[int] = Field(index=True)
with pytest.raises(RedisModelError): with pytest.raises(RedisModelError):
class ReadingWithPrice(EmbeddedJsonModel): class ReadingWithPrice(EmbeddedJsonModel):
gold_coins_charged: int = Field(index=True) gold_coins_charged: int = Field(index=True)
@ -532,13 +549,14 @@ def test_list_field_limitations():
# suite's migrator has already looked for migrations to run. # suite's migrator has already looked for migrations to run.
Migrator().run() Migrator().run()
witch = TarotWitch( witch = TarotWitch(tarot_cards=["death"])
tarot_cards=['death']
)
witch.save() witch.save()
actual = TarotWitch.find(TarotWitch.tarot_cards << 'death').all() actual = TarotWitch.find(TarotWitch.tarot_cards << "death").all()
assert actual == [witch] assert actual == [witch]
def test_schema(): def test_schema():
assert Member.redisearch_schema() == "ON JSON PREFIX 1 redis-developer:tests.test_json_model.Member: SCHEMA $.pk AS pk TAG SEPARATOR | $.first_name AS first_name TAG SEPARATOR | $.last_name AS last_name TAG SEPARATOR | $.email AS email TAG SEPARATOR | $.age AS age NUMERIC $.bio AS bio TAG SEPARATOR | $.bio AS bio_fts TEXT $.address.pk AS address_pk TAG SEPARATOR | $.address.city AS address_city TAG SEPARATOR | $.address.postal_code AS address_postal_code TAG SEPARATOR | $.address.note.pk AS address_note_pk TAG SEPARATOR | $.address.note.description AS address_note_description TAG SEPARATOR | $.orders[*].pk AS orders_pk TAG SEPARATOR | $.orders[*].items[*].pk AS orders_items_pk TAG SEPARATOR | $.orders[*].items[*].name AS orders_items_name TAG SEPARATOR |" assert (
Member.redisearch_schema()
== "ON JSON PREFIX 1 redis-developer:tests.test_json_model.Member: SCHEMA $.pk AS pk TAG SEPARATOR | $.first_name AS first_name TAG SEPARATOR | $.last_name AS last_name TAG SEPARATOR | $.email AS email TAG SEPARATOR | $.age AS age NUMERIC $.bio AS bio TAG SEPARATOR | $.bio AS bio_fts TEXT $.address.pk AS address_pk TAG SEPARATOR | $.address.city AS address_city TAG SEPARATOR | $.address.postal_code AS address_postal_code TAG SEPARATOR | $.address.note.pk AS address_note_pk TAG SEPARATOR | $.address.note.description AS address_note_description TAG SEPARATOR | $.orders[*].pk AS orders_pk TAG SEPARATOR | $.orders[*].items[*].pk AS orders_items_pk TAG SEPARATOR | $.orders[*].items[*].name AS orders_items_name TAG SEPARATOR |"
)