diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f92a7fe..054bbf6 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -12,7 +12,7 @@ on:
- '[0-9].[0-9]+'
- 'update/pre-commit-autoupdate'
schedule:
- - cron: '0 6 * * *' # Daily 6AM UTC build
+ - cron: '0 6 * * *' # Daily 6AM UTC build
jobs:
@@ -22,63 +22,57 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- - name: Checkout
- uses: actions/checkout@v2.3.4
- - name: Setup Python 3.9
- uses: actions/setup-python@v2
- with:
- python-version: 3.9
- #----------------------------------------------
- # ----- install & configure poetry -----
- #----------------------------------------------
- - name: Install Poetry
- uses: snok/install-poetry@v1
- with:
- virtualenvs-create: true
- virtualenvs-in-project: true
- installer-parallel: true
- #----------------------------------------------
- # load cached venv if cache exists
- #----------------------------------------------
- - name: Load cached venv
- id: cached-poetry-dependencies
- uses: actions/cache@v2
- with:
- path: .venv
- key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
- #----------------------------------------------
- # install dependencies if cache does not exist
- #----------------------------------------------
- - name: Install dependencies
- if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
- run: poetry install --no-interaction --no-root
- #----------------------------------------------
- # install your root project, if required
- #----------------------------------------------
- - name: Install library
- run: poetry install --no-interaction
- #----------------------------------------------
- # run test suite
- #----------------------------------------------
- - name: Run linter
- run: |
- make lint
-# - name: Prepare twine checker
-# run: |
-# pip install -U twine wheel
-# python setup.py sdist bdist_wheel
-# - name: Run twine checker
-# run: |
-# twine check dist/*
+ - name: Checkout
+ uses: actions/checkout@v2.3.5
+ - name: Setup Python 3.9
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ #----------------------------------------------
+ # ----- install & configure poetry -----
+ #----------------------------------------------
+ - name: Install Poetry
+ uses: snok/install-poetry@v1
+ with:
+ virtualenvs-create: true
+ virtualenvs-in-project: true
+ installer-parallel: true
+ #----------------------------------------------
+ # load cached venv if cache exists
+ #----------------------------------------------
+ - name: Load cached venv
+ id: cached-poetry-dependencies
+ uses: actions/cache@v2
+ with:
+ path: .venv
+ key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
+ #----------------------------------------------
+ # install dependencies if cache does not exist
+ #----------------------------------------------
+ - name: Install dependencies
+ if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
+ run: poetry install --no-interaction --no-root
+ #----------------------------------------------
+ # install your root project, if required
+ #----------------------------------------------
+ - name: Install library
+ run: poetry install --no-interaction
+ #----------------------------------------------
+ # run test suite
+ #----------------------------------------------
+ - name: Run linter
+ run: |
+ make dist
+ make lint
test-unix:
name: Test Unix
needs: lint
strategy:
matrix:
- os: [ubuntu-latest]
- pyver: [3.6, 3.7, 3.8, 3.9, pypy3]
- redismod: ["edge", "preview"]
+ os: [ ubuntu-latest ]
+ pyver: [ 3.6, 3.7, 3.8, 3.9, pypy3 ]
+ redismod: [ "preview" ]
fail-fast: false
services:
redis:
@@ -98,75 +92,95 @@ jobs:
OS: ${{ matrix.os }}
INSTALL_DIR: ${{ github.workspace }}/redis
steps:
- - name: Checkout
- uses: actions/checkout@v2.3.4
- - name: Setup Python ${{ matrix.pyver }}
- uses: actions/setup-python@v2
- with:
- python-version: ${{ matrix.pyver }}
- #----------------------------------------------
- # ----- install & configure poetry -----
- #----------------------------------------------
- - name: Install Poetry
- uses: snok/install-poetry@v1
- with:
- virtualenvs-create: true
- virtualenvs-in-project: true
- installer-parallel: true
- #----------------------------------------------
- # load cached venv if cache exists
- #----------------------------------------------
- - name: Load cached venv
- id: cached-poetry-dependencies
- uses: actions/cache@v2
- with:
- path: .venv
- key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
- #----------------------------------------------
- # install dependencies if cache does not exist
- #----------------------------------------------
- - name: Install dependencies
- if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
- run: poetry install --no-interaction --no-root
- #----------------------------------------------
- # install your root project, if required
- #----------------------------------------------
- - name: Install library
- run: poetry install --no-interaction
- - name: Run unittests (redismod:${{ matrix.redismod }}, ${{ matrix.os }})
- run: |
- make test
- poetry run coverage xml
- - name: Upload coverage
- uses: codecov/codecov-action@v2.1.0
- with:
- file: ./coverage.xml
- flags: unit
- env_vars: OS
- fail_ci_if_error: false
+ - name: Checkout
+ uses: actions/checkout@v2.3.5
+ - name: Setup Python ${{ matrix.pyver }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.pyver }}
+ #----------------------------------------------
+ # ----- install & configure poetry -----
+ #----------------------------------------------
+ - name: Install Poetry
+ uses: snok/install-poetry@v1
+ with:
+ virtualenvs-create: true
+ virtualenvs-in-project: true
+ installer-parallel: true
+ #----------------------------------------------
+ # load cached venv if cache exists
+ #----------------------------------------------
+ - name: Load cached venv
+ id: cached-poetry-dependencies
+ uses: actions/cache@v2
+ with:
+ path: .venv
+ key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
+ #----------------------------------------------
+ # install dependencies if cache does not exist
+ #----------------------------------------------
+ - name: Install dependencies
+ if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
+ run: poetry install --no-interaction --no-root
+ #----------------------------------------------
+ # install your root project, if required
+ #----------------------------------------------
+ - name: Install library
+ run: poetry install --no-interaction
+ - name: Run unittests (redismod:${{ matrix.redismod }}, ${{ matrix.os }})
+ run: |
+ make test
+ poetry run coverage xml
+ - name: Upload coverage
+ uses: codecov/codecov-action@v2.1.0
+ with:
+ file: ./coverage.xml
+ flags: unit
+ env_vars: OS
+ fail_ci_if_error: false
deploy:
- name: Deploy
- runs-on: ubuntu-latest
- needs: test-unix
- # Run only on pushing a tag
- if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
- steps:
- - name: Checkout
- uses: actions/checkout@v2.3.4
- - name: Setup Python 3.9
- uses: actions/setup-python@v2
- with:
- python-version: 3.9
- - name: Install dependencies
- run:
- python -m pip install -U pip wheel twine
- - name: Make dists
- run:
- python setup.py sdist bdist_wheel
- - name: PyPI upload
- env:
- TWINE_USERNAME: __token__
- TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
- run: |
- twine upload dist/*
+ name: Deploy
+ runs-on: ubuntu-latest
+ needs: test-unix
+ # Run only on pushing a tag
+ if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2.3.5
+ - name: Setup Python 3.9
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Install Poetry
+ uses: snok/install-poetry@v1
+ with:
+ virtualenvs-create: true
+ virtualenvs-in-project: true
+ installer-parallel: true
+ #----------------------------------------------
+ # load cached venv if cache exists
+ #----------------------------------------------
+ - name: Load cached venv
+ id: cached-poetry-dependencies
+ uses: actions/cache@v2
+ with:
+ path: .venv
+ key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
+ #----------------------------------------------
+ # install dependencies if cache does not exist
+ #----------------------------------------------
+ - name: Install dependencies
+ if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
+ run: poetry install --no-interaction --no-root
+ #----------------------------------------------
+ # install your root project, if required
+ #----------------------------------------------
+ - name: Install library
+ run: poetry install --no-interaction
+ - name: PyPI upload
+ env:
+ TWINE_USERNAME: __token__
+ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
+ run: |
+ make upload
diff --git a/.gitignore b/.gitignore
index 1d4e9fe..3c80a8e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -128,3 +128,9 @@ dmypy.json
# Pyre type checker
.pyre/
data
+
+# Makefile install checker
+.install.stamp
+
+# Sync version of the library, via Unasync
+redis_om/
\ No newline at end of file
diff --git a/.install.stamp b/.install.stamp
deleted file mode 100644
index e69de29..0000000
diff --git a/LICENSE b/LICENSE
index 3c1d366..f829336 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,21 +1,26 @@
-The MIT License (MIT)
+Copyright 2021 Redis, Inc.
-Copyright (c) 2021-present Redis, Inc.
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation and/or
+other materials provided with the distribution.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+3. Neither the name of the copyright holder nor the names of its contributors
+may be used to endorse or promote products derived from this software without
+specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/Makefile b/Makefile
index 9c88b0b..e75faef 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,5 @@
-NAME := redis_om
+NAME := aredis_om
+SYNC_NAME := redis_om
INSTALL_STAMP := .install.stamp
POETRY := $(shell command -v poetry 2> /dev/null)
@@ -19,7 +20,7 @@ help:
@echo "Check the Makefile to know exactly what each target is doing."
install: $(INSTALL_STAMP)
-$(INSTALL_STAMP): pyproject.toml poetry.lock
+$(INSTALL_STAMP): pyproject.toml
@if [ -z $(POETRY) ]; then echo "Poetry could not be found. See https://python-poetry.org/docs/"; exit 2; fi
$(POETRY) install
touch $(INSTALL_STAMP)
@@ -28,23 +29,48 @@ $(INSTALL_STAMP): pyproject.toml poetry.lock
clean:
find . -type d -name "__pycache__" | xargs rm -rf {};
rm -rf $(INSTALL_STAMP) .coverage .mypy_cache
+ rm -rf build
+ rm -rf dist
+ rm -rf redis_om
+
+
+.PHONY: dist
+dist: $(INSTALL_STAMP) clean sync
+ $(POETRY) build
+
+.PHONY: sync
+sync: $(INSTALL_STAMP)
+ $(POETRY) run python make_sync.py
+
+.PHONY: upload
+upload: dist
+ $(POETRY) run twine upload dist/*
.PHONY: lint
-lint: $(INSTALL_STAMP)
- $(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME)
+lint: $(INSTALL_STAMP) dist
+ $(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME) $(SYNC_NAME)
$(POETRY) run black ./tests/ $(NAME)
- $(POETRY) run flake8 --ignore=W503,E501,F401,E731 ./tests/ $(NAME)
- $(POETRY) run mypy ./tests/ $(NAME) --ignore-missing-imports
- $(POETRY) run bandit -r $(NAME) -s B608
+ $(POETRY) run flake8 --ignore=W503,E501,F401,E731 ./tests/ $(NAME) $(SYNC_NAME)
+ $(POETRY) run mypy ./tests/ $(NAME) $(SYNC_NAME) --ignore-missing-imports
+ $(POETRY) run bandit -r $(NAME) $(SYNC_NAME) -s B608
+ $(POETRY) run twine check dist/*
.PHONY: format
-format: $(INSTALL_STAMP)
- $(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME)
- $(POETRY) run black ./tests/ $(NAME)
+format: $(INSTALL_STAMP) sync
+ $(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME) $(SYNC_NAME)
+ $(POETRY) run black ./tests/ $(NAME) $(SYNC_NAME)
.PHONY: test
-test: $(INSTALL_STAMP)
- $(POETRY) run pytest -n auto -s -vv ./tests/ --cov-report term-missing --cov $(NAME)
+test: $(INSTALL_STAMP) sync
+ $(POETRY) run pytest -n auto -s -vv ./tests/ --cov-report term-missing --cov $(NAME) $(SYNC_NAME)
+
+.PHONY: test_oss
+test_oss: $(INSTALL_STAMP) sync
+ # Specifically tests against a local OSS Redis instance via
+ # docker-compose.yml. Do not use this for CI testing, where we should
+ # instead have a matrix of Docker images.
+ REDIS_OM_URL="redis://localhost:6381" $(POETRY) run pytest -n auto -s -vv ./tests/ --cov-report term-missing --cov $(NAME)
+
.PHONY: shell
shell: $(INSTALL_STAMP)
diff --git a/README.md b/README.md
index 950ad06..9fc5799 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,14 @@
-
Redis OM
+
+
+
+
+
+
+
+
- Objecting mapping and more, for Redis.
+ Object mapping, and more, for Redis and Python
@@ -11,52 +18,243 @@
[![License][license-image]][license-url]
[![Build Status][ci-svg]][ci-url]
-Redis OM is a library that helps you build modern Python applications with Redis.
+**Redis OM Python** makes it easy to model Redis data in your Python applications.
**Redis OM Python** | [Redis OM Node.js][redis-om-js] | [Redis OM Spring][redis-om-spring] | [Redis OM .NET][redis-om-dotnet]
Table of contents
-
+span
+
-
-- [Why Redis OM?](#why)
-- [Getting started](#getting-started)
-- [Installation](#installation)
-- [Documentation](#documentation)
-- [Troubleshooting](#troubleshooting)
-- [Contributing](#contributing)
-- [License](#license)
+- [π‘ Why Redis OM?](#-why-redis-om)
+- [π Modeling Your Data](#-modeling-your-data)
+- [β Validating Data With Your Model](#-validating-data-with-your-model)
+- [π Rich Queries and Embedded Models](#-rich-queries-and-embedded-models)
+- [π» Installation](#-installation)
+- [π Documentation](#-documentation)
+- [βοΈ Troubleshooting](#-troubleshooting)
+- [β¨ So, How Do You Get RediSearch and RedisJSON?](#-so-how-do-you-get-redisearch-and-redisjson)
+- [β€οΈ Contributing](#-contributing)
+- [π License](#-license)
-## β‘ Why Redis OM?
+## π‘ Why Redis OM?
-Redis OM is a library of high-level tools that help you build modern Python applications with Redis.
+Redis OM provides high-level abstractions that make it easy to model and query data in Redis with modern Python applications.
-This *preview release* includes our first major component: a **declarative model class** backed by Redis.
+This **preview** release contains the following features:
-## π Getting started
+* Declarative object mapping for Redis objects
+* Declarative secondary-index generation
+* Fluent APIs for querying Redis
-### Object Mapping
+## π Modeling Your Data
-With Redis OM, you get powerful data modeling, validation, and query expressions with a small amount of code.
+Redis OM contains powerful declarative models that give you data validation, serialization, and persistence to Redis.
-Check out this example:
+Check out this example of modeling customer data with Redis OM. First, we create a `Customer` model:
```python
import datetime
from typing import Optional
-from redis_om.model import (
+from pydantic import EmailStr
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: EmailStr
+ join_date: datetime.date
+ age: int
+ bio: Optional[str]
+```
+
+Now that we have a `Customer` model, let's use it to save customer data to Redis.
+
+```python
+import datetime
+from typing import Optional
+
+from pydantic import EmailStr
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: EmailStr
+ join_date: datetime.date
+ age: int
+ bio: Optional[str]
+
+
+# First, we create a new `Customer` object:
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38,
+ bio="Python developer, works at Redis, Inc."
+)
+
+# The model generates a globally unique primary key automatically
+# without needing to talk to Redis.
+print(andrew.pk)
+# > '01FJM6PH661HCNNRC884H6K30C'
+
+# We can save the model to Redis by calling `save()`:
+andrew.save()
+
+# To retrieve this customer with its primary key, we use `Customer.get()`:
+assert Customer.get(andrew.pk) == andrew
+```
+
+**Ready to learn more?** Check out the [getting started](docs/getting_started.md) guide.
+
+Or, continue reading to see how Redis OM makes data validation a snap.
+
+## β Validating Data With Your Model
+
+Redis OM uses [Pydantic][pydantic-url] to validate data based on the type annotations you assign to fields in a model class.
+
+This validation ensures that fields like `first_name`, which the `Customer` model marked as a `str`, are always strings. **But every Redis OM model is also a Pydantic model**, so you can use Pydantic validators like `EmailStr`, `Pattern`, and many more for complex validations!
+
+For example, because we used the `EmailStr` type for the `email` field, we'll get a validation error if we try to create a `Customer` with an invalid email address:
+
+```python
+import datetime
+from typing import Optional
+
+from pydantic import EmailStr, ValidationError
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: EmailStr
+ join_date: datetime.date
+ age: int
+ bio: Optional[str]
+
+
+try:
+ Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="Not an email address!",
+ join_date=datetime.date.today(),
+ age=38,
+ bio="Python developer, works at Redis, Inc."
+ )
+except ValidationError as e:
+ print(e)
+ """
+ pydantic.error_wrappers.ValidationError: 1 validation error for Customer
+ email
+ value is not a valid email address (type=value_error.email)
+ """
+```
+
+**Any existing Pydantic validator should work** as a drop-in type annotation with a Redis OM model. You can also write arbitrarily complex custom validations!
+
+To learn more, see the [documentation on data validation](docs/validation.md).
+
+## π Rich Queries and Embedded Models
+
+Data modeling, validation, and saving models to Redis all work regardless of how you run Redis.
+
+Next, we'll show you the **rich query expressions** and **embedded models** Redis OM provides when the [RediSearch][redisearch-url] and [RedisJSON][redis-json-url] modules are installed in your Redis deployment, or you're using [Redis Enterprise][redis-enterprise-url].
+
+**TIP**: *Wait, what's a Redis module?* If you aren't familiar with Redis modules, review the [So, How Do You Get RediSearch and RedisJSON?](#-so-how-do-you-get-redisearch-and-redisjson) section of this README.
+
+### Querying
+
+Redis OM comes with a rich query language that allows you to query Redis with Python expressions.
+
+To show how this works, we'll make a small change to the `Customer` model we defined earlier. We'll add `Field(index=True)` to tell Redis OM that we want to index the `last_name` and `age` fields:
+
+```python
+import datetime
+from typing import Optional
+
+from pydantic import EmailStr
+
+from redis_om import (
+ Field,
+ HashModel,
+ Migrator
+)
+from redis_om import get_redis_connection
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str = Field(index=True)
+ email: EmailStr
+ join_date: datetime.date
+ age: int = Field(index=True)
+ bio: Optional[str]
+
+
+# Now, if we use this model with a Redis deployment that has the
+# RediSearch module installed, we can run queries like the following.
+
+# Before running queries, we need to run migrations to set up the
+# indexes that Redis OM will use. You can also use the `migrate`
+# CLI tool for this!
+redis = get_redis_connection()
+Migrator(redis).run()
+
+# Find all customers with the last name "Brookins"
+Customer.find(Customer.last_name == "Brookins").all()
+
+# Find all customers that do NOT have the last name "Brookins"
+Customer.find(Customer.last_name != "Brookins").all()
+
+# Find all customers whose last name is "Brookins" OR whose age is
+# 100 AND whose last name is "Smith"
+Customer.find((Customer.last_name == "Brookins") | (
+ Customer.age == 100
+) & (Customer.last_name == "Smith")).all()
+```
+
+These queries -- and more! -- are possible because **Redis OM manages indexes for you automatically**.
+
+Querying with this index features a rich expression syntax inspired by the Django ORM, SQLAlchemy, and Peewee. We think you'll enjoy it!
+
+To learn more about how to query with Redis OM, see the [documentation on querying](docs/querying.md).
+
+### Embedded Models
+
+Redis OM can store and query **nested models** like any document database, with the speed and power you get from Redis. Let's see how this works.
+
+In the next example, we'll define a new `Address` model and embed it within the `Customer` model.
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import (
EmbeddedJsonModel,
JsonModel,
Field,
+ Migrator,
)
+from redis_om import get_redis_connection
+
class Address(EmbeddedJsonModel):
address_line_1: str
@@ -78,41 +276,23 @@ class Customer(JsonModel):
# Creates an embedded model.
address: Address
-```
-The example code defines `Address` and `Customer` models for use with a Redis database with the [RedisJSON](redis-json-url) module installed.
-With these two classes defined, you can now:
+# With these two models and a Redis deployment with the RedisJSON
+# module installed, we can run queries like the following.
-* Validate data based on the model's type annotations using [Pydantic](pydantic-url)
-* Persist model instances to Redis as JSON
-* Instantiate model instances from Redis by primary key (a client-generated [ULID](ulid-url))
-* Query on any indexed fields in the models
-
-### Querying
-Querying uses a rich expression syntax inspired by the Django ORM, SQLAlchemy, and Peewee.
-
-Here are a few example queries that use the models we defined earlier:
-
-```python
-# Find all customers with the last name "Brookins"
-Customer.find(Customer.last_name == "Brookins").all()
-
-# Find all customers that do NOT have the last name "Brookins"
-Customer.find(Customer.last_name != "Brookins").all()
-
-# Find all customers whose last name is "Brookins" OR whose age is
-# 100 AND whose last name is "Smith"
-Customer.find((Customer.last_name == "Brookins") | (
- Customer.age == 100
-) & (Customer.last_name == "Smith")).all()
+# Before running queries, we need to run migrations to set up the
+# indexes that Redis OM will use. You can also use the `migrate`
+# CLI tool for this!
+redis = get_redis_connection()
+Migrator(redis).run()
# Find all customers who live in San Antonio, TX
Customer.find(Customer.address.city == "San Antonio",
Customer.address.state == "TX")
```
-Ready to learn more? Read the [getting started](docs/getting_started.md) guide or check out how to [add Redis OM to your FastAPI project](docs/integrating.md).
+To learn more, read the [documentation on embedded models](docs/embedded.md).
## π» Installation
@@ -128,45 +308,22 @@ $ poetry add redis-om
## π Documentation
-Documentation is available [here](docs/index.md).
+The Redis OM documentation is available [here](docs/index.md).
## βοΈ Troubleshooting
-If you run into trouble or have any questions, we're here to help!
+If you run into trouble or have any questions, we're here to help!
First, check the [FAQ](docs/faq.md). If you don't find the answer there,
hit us up on the [Redis Discord Server](http://discord.gg/redis).
-## β¨ RediSearch and RedisJSON
+## β¨ So, How Do You Get RediSearch and RedisJSON?
-Redis OM relies on core features from two source available Redis modules: **RediSearch** and **RedisJSON**.
+Some advanced features of Redis OM rely on core features from two source available Redis modules: [RediSearch][redisearch-url] and [RedisJSON][redis-json-url].
-These modules are the "magic" behind the scenes:
+You can run these modules in your self-hosted Redis deployment, or you can use [Redis Enterprise][redis-enterprise-url], which includes both modules.
-* RediSearch adds querying, indexing, and full-text search to Redis
-* RedisJSON adds the JSON data type to Redis
-
-### Why this is important
-
-Without RediSearch or RedisJSON installed, you can still use Redis OM to create declarative models backed by Redis.
-
-We'll store your model data in Redis as Hashes, and you can retrieve models using their primary keys. You'll also get all the validation features from Pydantic.
-
-So, what won't work without these modules?
-
-1. Without RedisJSON, you won't be able to nest models inside each other, like we did with the example model of a `Customer` model that has an `Address` embedded inside it.
-2. Without RediSearch, you won't be able to use our expressive queries to find models -- just primary keys.
-
-### So how do you get RediSearch and RedisJSON?
-
-You can use RediSearch and RedisJSON with your self-hosted Redis deployment. Just follow the instructions on installing the binary versions of the modules in their Quick Start Guides:
-
-- [RedisJSON Quick Start - Running Binaries](https://oss.redis.com/redisjson/#download-and-running-binaries)
-- [RediSearch Quick Start - Running Binaries](https://oss.redis.com/redisearch/Quick_Start/#download_and_running_binaries)
-
-**NOTE**: Both Quick Start Guides also have instructions on how to run these modules in Redis with Docker.
-
-Don't want to run Redis yourself? RediSearch and RedisJSON are also available on Redis Cloud. [Get started here.](https://redis.com/try-free/)
+To learn more, read [our documentation](docs/redis_modules.md).
## β€οΈ Contributing
@@ -176,9 +333,9 @@ We'd love your contributions!
You can also **contribute documentation** -- or just let us know if something needs more detail. [Open an issue on GitHub](https://github.com/redis-om/redis-om-python/issues/new) to get started.
-## License
+## π License
-Redis OM is [MIT licensed][license-url].
+Redis OM uses the [BSD 3-Clause license][license-url].
@@ -188,7 +345,6 @@ Redis OM is [MIT licensed][license-url].
[ci-url]: https://github.com/redis-om/redis-om-python/actions/workflows/build.yml
[license-image]: http://img.shields.io/badge/license-MIT-green.svg?style=flat-square
[license-url]: LICENSE
-
[redis-om-website]: https://developer.redis.com
@@ -199,4 +355,4 @@ Redis OM is [MIT licensed][license-url].
[redis-json-url]: https://oss.redis.com/redisjson/
[pydantic-url]: https://github.com/samuelcolvin/pydantic
[ulid-url]: https://github.com/ulid/spec
-
+[redis-enterprise-url]: https://redis.com/try-free/
diff --git a/aredis_om/__init__.py b/aredis_om/__init__.py
new file mode 100644
index 0000000..598ec29
--- /dev/null
+++ b/aredis_om/__init__.py
@@ -0,0 +1,15 @@
+from .checks import has_redis_json, has_redisearch
+from .connections import get_redis_connection
+from .model.migrations.migrator import MigrationError, Migrator
+from .model.model import (
+ EmbeddedJsonModel,
+ Field,
+ FindQuery,
+ HashModel,
+ JsonModel,
+ NotFoundError,
+ QueryNotSupportedError,
+ QuerySyntaxError,
+ RedisModel,
+ RedisModelError,
+)
diff --git a/aredis_om/checks.py b/aredis_om/checks.py
new file mode 100644
index 0000000..00c5084
--- /dev/null
+++ b/aredis_om/checks.py
@@ -0,0 +1,28 @@
+from functools import lru_cache
+from typing import List
+
+from aredis_om.connections import get_redis_connection
+
+
+# NOTE: do not use functools.lru_cache on an async def -- it caches the
+async def get_modules(conn) -> List[str]:
+ modules = await conn.execute_command("module", "list")
+ return [m[1] for m in modules]
+
+
+# (no lru_cache: caching a coroutine object breaks repeated awaits)
+async def has_redis_json(conn=None):
+ if conn is None:
+ conn = get_redis_connection()
+ names = await get_modules(conn)
+ return b"ReJSON" in names or "ReJSON" in names
+
+
+# (no lru_cache: caching a coroutine object breaks repeated awaits)
+async def has_redisearch(conn=None):
+ if conn is None:
+ conn = get_redis_connection()
+ if await has_redis_json(conn):
+ return True
+ names = await get_modules(conn)
+ return b"search" in names or "search" in names
diff --git a/redis_om/connections.py b/aredis_om/connections.py
similarity index 57%
rename from redis_om/connections.py
rename to aredis_om/connections.py
index 80eecf3..59d578d 100644
--- a/redis_om/connections.py
+++ b/aredis_om/connections.py
@@ -1,28 +1,22 @@
import os
-from typing import Union
-import dotenv
import aioredis
-import redis
-from redis_om.unasync_util import ASYNC_MODE
+import dotenv
+
dotenv.load_dotenv()
URL = os.environ.get("REDIS_OM_URL", None)
-if ASYNC_MODE:
- client = aioredis.Redis
-else:
- client = redis.Redis
-def get_redis_connection(**kwargs) -> Union[aioredis.Redis, redis.Redis]:
+def get_redis_connection(**kwargs) -> aioredis.Redis:
# If someone passed in a 'url' parameter, or specified a REDIS_OM_URL
# environment variable, we'll create the Redis client from the URL.
url = kwargs.pop("url", URL)
if url:
- return client.from_url(url, **kwargs)
+ return aioredis.Redis.from_url(url, **kwargs)
# Decode from UTF-8 by default
if "decode_responses" not in kwargs:
kwargs["decode_responses"] = True
- return client(**kwargs)
+ return aioredis.Redis(**kwargs)
diff --git a/redis_om/model/__init__.py b/aredis_om/model/__init__.py
similarity index 57%
rename from redis_om/model/__init__.py
rename to aredis_om/model/__init__.py
index 8fe2844..7df1623 100644
--- a/redis_om/model/__init__.py
+++ b/aredis_om/model/__init__.py
@@ -1 +1,2 @@
+from .migrations.migrator import MigrationError, Migrator
from .model import EmbeddedJsonModel, Field, HashModel, JsonModel, RedisModel
diff --git a/redis_om/__init__.py b/aredis_om/model/cli/__init__.py
similarity index 100%
rename from redis_om/__init__.py
rename to aredis_om/model/cli/__init__.py
diff --git a/redis_om/model/cli/migrate.py b/aredis_om/model/cli/migrate.py
similarity index 74%
rename from redis_om/model/cli/migrate.py
rename to aredis_om/model/cli/migrate.py
index 5c3c442..c73386c 100644
--- a/redis_om/model/cli/migrate.py
+++ b/aredis_om/model/cli/migrate.py
@@ -1,10 +1,10 @@
import click
-from redis_om.model.migrations.migrator import Migrator
+from aredis_om.model.migrations.migrator import Migrator
@click.command()
-@click.option("--module", default="redis_om")
+@click.option("--module", default="aredis_om")
def migrate(module):
migrator = Migrator(module)
diff --git a/redis_om/model/encoders.py b/aredis_om/model/encoders.py
similarity index 100%
rename from redis_om/model/encoders.py
rename to aredis_om/model/encoders.py
diff --git a/redis_om/model/cli/__init__.py b/aredis_om/model/migrations/__init__.py
similarity index 100%
rename from redis_om/model/cli/__init__.py
rename to aredis_om/model/migrations/__init__.py
diff --git a/redis_om/model/migrations/migrator.py b/aredis_om/model/migrations/migrator.py
similarity index 86%
rename from redis_om/model/migrations/migrator.py
rename to aredis_om/model/migrations/migrator.py
index 0f11e11..c1c5d1f 100644
--- a/redis_om/model/migrations/migrator.py
+++ b/aredis_om/model/migrations/migrator.py
@@ -2,12 +2,11 @@ import hashlib
import logging
from dataclasses import dataclass
from enum import Enum
-from typing import Optional, Union
+from typing import List, Optional
-from redis import ResponseError, Redis
-from aioredis import ResponseError as AResponseError, Redis as ARedis
+from aioredis import Redis, ResponseError
-from redis_om.model.model import model_registry
+from aredis_om.model.model import model_registry
log = logging.getLogger(__name__)
@@ -42,10 +41,10 @@ def schema_hash_key(index_name):
return f"{index_name}:hash"
-async def create_index(redis: Union[Redis, ARedis], index_name, schema, current_hash):
+async def create_index(redis: Redis, index_name, schema, current_hash):
try:
await redis.execute_command(f"ft.info {index_name}")
- except (ResponseError, AResponseError):
+ except ResponseError:
await redis.execute_command(f"ft.create {index_name} {schema}")
await redis.set(schema_hash_key(index_name), current_hash)
else:
@@ -64,7 +63,7 @@ class IndexMigration:
schema: str
hash: str
action: MigrationAction
- redis: Union[Redis, ARedis]
+ redis: Redis
previous_hash: Optional[str] = None
async def run(self):
@@ -87,9 +86,9 @@ class IndexMigration:
class Migrator:
- def __init__(self, redis: Union[Redis, ARedis], module=None):
+ def __init__(self, redis: Redis, module=None):
self.module = module
- self.migrations = []
+ self.migrations: List[IndexMigration] = []
self.redis = redis
async def run(self):
@@ -108,7 +107,7 @@ class Migrator:
try:
await self.redis.execute_command("ft.info", cls.Meta.index_name)
- except (ResponseError, AResponseError):
+ except ResponseError:
self.migrations.append(
IndexMigration(
name,
@@ -116,12 +115,12 @@ class Migrator:
schema,
current_hash,
MigrationAction.CREATE,
- self.redis
+ self.redis,
)
)
continue
- stored_hash = self.redis.get(hash_key)
+ stored_hash = await self.redis.get(hash_key)
schema_out_of_date = current_hash != stored_hash
if schema_out_of_date:
@@ -134,7 +133,7 @@ class Migrator:
current_hash,
MigrationAction.DROP,
self.redis,
- stored_hash
+ stored_hash,
)
)
self.migrations.append(
@@ -145,7 +144,7 @@ class Migrator:
current_hash,
MigrationAction.CREATE,
self.redis,
- stored_hash
+ stored_hash,
)
)
diff --git a/redis_om/model/model.py b/aredis_om/model/model.py
similarity index 88%
rename from redis_om/model/model.py
rename to aredis_om/model/model.py
index 4d295c5..eb65fa3 100644
--- a/redis_om/model/model.py
+++ b/aredis_om/model/model.py
@@ -10,6 +10,7 @@ from functools import reduce
from typing import (
AbstractSet,
Any,
+ AsyncGenerator,
Callable,
Dict,
List,
@@ -28,21 +29,22 @@ from typing import (
)
import aioredis
-import redis
+from aioredis.client import Pipeline
from pydantic import BaseModel, validator
from pydantic.fields import FieldInfo as PydanticFieldInfo
from pydantic.fields import ModelField, Undefined, UndefinedType
-from pydantic.main import ModelMetaclass
+from pydantic.main import ModelMetaclass, validate_model
from pydantic.typing import NoArgAnyCallable
from pydantic.utils import Representation
-from redis.client import Pipeline
from ulid import ULID
-from redis_om.connections import get_redis_connection
+from ..checks import has_redis_json, has_redisearch
+from ..connections import get_redis_connection
+from ..unasync_util import ASYNC_MODE
from .encoders import jsonable_encoder
from .render_tree import render_tree
from .token_escaper import TokenEscaper
-from ..unasync_util import ASYNC_MODE
+
model_registry = {}
_T = TypeVar("_T")
@@ -116,33 +118,52 @@ def is_supported_container_type(typ: Optional[type]) -> bool:
def validate_model_fields(model: Type["RedisModel"], field_values: Dict[str, Any]):
for field_name in field_values.keys():
+ if "__" in field_name:
+ obj = model
+ for sub_field in field_name.split("__"):
+ if not hasattr(obj, sub_field):
+ raise QuerySyntaxError(
+ f"The update path {field_name} contains a field that does not "
+ f"exit on {model.__name__}. The field is: {sub_field}"
+ )
+ obj = getattr(obj, sub_field)
+ return
+
if field_name not in model.__fields__:
raise QuerySyntaxError(
f"The field {field_name} does not exist on the model {model.__name__}"
)
-class ExpressionProtocol(Protocol):
- op: Operators
- left: ExpressionOrModelField
- right: ExpressionOrModelField
+def decode_redis_value(
+ obj: Union[List[bytes], Dict[bytes, bytes], bytes], encoding: str
+) -> Union[List[str], Dict[str, str], str]:
+ """Decode a binary-encoded Redis hash into the specified encoding."""
+ if isinstance(obj, list):
+ return [v.decode(encoding) for v in obj]
+ if isinstance(obj, dict):
+ return {
+ key.decode(encoding): value.decode(encoding) for key, value in obj.items()
+ }
+ elif isinstance(obj, bytes):
+ return obj.decode(encoding)
- def __invert__(self) -> "Expression":
- pass
- def __and__(self, other: ExpressionOrModelField):
- pass
+class PipelineError(Exception):
+ """A Redis pipeline error."""
- def __or__(self, other: ExpressionOrModelField):
- pass
- @property
- def name(self) -> str:
- raise NotImplementedError
-
- @property
- def tree(self) -> str:
- raise NotImplementedError
+def verify_pipeline_response(
+ response: List[Union[bytes, str]], expected_responses: int = 0
+):
+ # TODO: More generic pipeline verification here (what else is possible?),
+ # plus hash and JSON-specific verifications in separate functions.
+ actual_responses = len(response)
+ if actual_responses != expected_responses:
+ raise PipelineError(
+ f"We expected {expected_responses}, but the Redis "
+ f"pipeline returned {actual_responses} responses."
+ )
@dataclasses.dataclass
@@ -318,6 +339,13 @@ class FindQuery:
page_size: int = DEFAULT_PAGE_SIZE,
sort_fields: Optional[List[str]] = None,
):
+ if not has_redisearch(model.db()):
+ raise RedisModelError(
+ "Your Redis instance does not have either the RediSearch module "
+ "or RedisJSON module installed. Querying requires that your Redis "
+ "instance has one of these modules installed."
+ )
+
self.expressions = expressions
self.model = model
self.offset = offset
@@ -331,10 +359,10 @@ class FindQuery:
self._expression = None
self._query: Optional[str] = None
- self._pagination: list[str] = []
- self._model_cache: list[RedisModel] = []
+ self._pagination: List[str] = []
+ self._model_cache: List[RedisModel] = []
- def dict(self) -> dict[str, Any]:
+ def dict(self) -> Dict[str, Any]:
return dict(
model=self.model,
offset=self.offset,
@@ -757,7 +785,7 @@ class FindQuery:
if pipeline:
# TODO: Response type?
# TODO: Better error detection for transactions.
- pipeline.execute()
+ await pipeline.execute()
async def delete(self):
"""Delete all matching records in this query."""
@@ -787,8 +815,10 @@ class FindQuery:
give it a new offset and limit: offset=n, limit=1.
"""
if ASYNC_MODE:
- raise QuerySyntaxError("Cannot use [] notation with async code. "
- "Use FindQuery.get_item() instead.")
+ raise QuerySyntaxError(
+ "Cannot use [] notation with async code. "
+ "Use FindQuery.get_item() instead."
+ )
if self._model_cache and len(self._model_cache) >= item:
return self._model_cache[item]
@@ -821,7 +851,7 @@ class FindQuery:
return result[0]
-class PrimaryKeyCreator(Protocol):
+class PrimaryKeyCreator(abc.ABC):
def create_pk(self, *args, **kwargs) -> str:
"""Create a new primary key"""
@@ -938,7 +968,7 @@ class PrimaryKey:
field: ModelField
-class MetaProtocol(Protocol):
+class BaseMeta(abc.ABC):
global_key_prefix: str
model_key_prefix: str
primary_key_pattern: str
@@ -948,6 +978,7 @@ class MetaProtocol(Protocol):
index_name: str
abstract: bool
embedded: bool
+ encoding: str
@dataclasses.dataclass
@@ -961,16 +992,17 @@ class DefaultMeta:
global_key_prefix: Optional[str] = None
model_key_prefix: Optional[str] = None
primary_key_pattern: Optional[str] = None
- database: Optional[Union[redis.Redis, aioredis.Redis]] = None
+ database: Optional[aioredis.Redis] = None
primary_key: Optional[PrimaryKey] = None
primary_key_creator_cls: Optional[Type[PrimaryKeyCreator]] = None
index_name: Optional[str] = None
abstract: Optional[bool] = False
embedded: Optional[bool] = False
+ encoding: str = "utf-8"
class ModelMeta(ModelMetaclass):
- _meta: MetaProtocol
+ _meta: BaseMeta
def __new__(cls, name, bases, attrs, **kwargs): # noqa C901
meta = attrs.pop("Meta", None)
@@ -1036,10 +1068,13 @@ class ModelMeta(ModelMetaclass):
new_class._meta.database = getattr(
base_meta, "database", get_redis_connection()
)
+ if not getattr(new_class._meta, "encoding", None):
+ new_class._meta.encoding = getattr(base_meta, "encoding")
if not getattr(new_class._meta, "primary_key_creator_cls", None):
new_class._meta.primary_key_creator_cls = getattr(
base_meta, "primary_key_creator_cls", UlidPrimaryKey
)
+ # TODO: Configurable key separator, defaults to ":"
if not getattr(new_class._meta, "index_name", None):
new_class._meta.index_name = (
f"{new_class._meta.global_key_prefix}:"
@@ -1082,7 +1117,7 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
return await self.db().delete(self.key())
@classmethod
- async def get(cls, pk: Any) -> 'RedisModel':
+ async def get(cls, pk: Any) -> "RedisModel":
raise NotImplementedError
async def update(self, **field_values):
@@ -1092,7 +1127,7 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
async def save(self, pipeline: Optional[Pipeline] = None) -> "RedisModel":
raise NotImplementedError
- @validator("pk", always=True)
+ @validator("pk", always=True, allow_reuse=True)
def validate_pk(cls, v):
if not v:
v = cls._meta.primary_key_creator_cls().create_pk()
@@ -1191,19 +1226,45 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
return d
@classmethod
- async def add(cls, models: Sequence["RedisModel"]) -> Sequence["RedisModel"]:
- # TODO: Add transaction support
- return [await model.save() for model in models]
+ async def add(
+ cls,
+ models: Sequence["RedisModel"],
+ pipeline: Optional[Pipeline] = None,
+ pipeline_verifier: Callable[..., Any] = verify_pipeline_response,
+ ) -> Sequence["RedisModel"]:
+ if pipeline is None:
+ # By default, send commands in a pipeline. Saving each model will
+ # be atomic, but Redis may process other commands in between
+ # these saves.
+ db = cls.db().pipeline(transaction=False)
+ else:
+ # If the user gave us a pipeline, add our commands to that. The user
+ # will be responsible for executing the pipeline after they've accumulated
+ # the commands they want to send.
+ db = pipeline
- @classmethod
- def values(cls):
- """Return raw values from Redis instead of model instances."""
- raise NotImplementedError
+ for model in models:
+ # save() just returns the model, we don't need that here.
+ await model.save(pipeline=db)
+
+ # If the user didn't give us a pipeline, then we need to execute
+ # the one we just created.
+ if pipeline is None:
+ result = await db.execute()
+ pipeline_verifier(result, expected_responses=len(models))
+
+ return models
@classmethod
def redisearch_schema(cls):
raise NotImplementedError
+ def check(self):
+ """Run all validations."""
+ *_, validation_error = validate_model(self.__class__, self.__dict__)
+ if validation_error:
+ raise validation_error
+
class HashModel(RedisModel, abc.ABC):
def __init_subclass__(cls, **kwargs):
@@ -1223,6 +1284,7 @@ class HashModel(RedisModel, abc.ABC):
)
async def save(self, pipeline: Optional[Pipeline] = None) -> "HashModel":
+ self.check()
if pipeline is None:
db = self.db()
else:
@@ -1232,12 +1294,39 @@ class HashModel(RedisModel, abc.ABC):
await db.hset(self.key(), mapping=document)
return self
+ @classmethod
+ async def all_pks(cls) -> AsyncGenerator[str, None]: # type: ignore
+ key_prefix = cls.make_key(cls._meta.primary_key_pattern.format(pk=""))
+ # TODO: We assume the key ends with the default separator, ":" -- when
+ # we make the separator configurable, we need to update this as well.
+ # ... And probably lots of other places ...
+ #
+ # TODO: Also, we need to decide how we want to handle the lack of
+ # decode_responses=True...
+ return (
+ key.split(":")[-1]
+ if isinstance(key, str)
+ else key.decode(cls.Meta.encoding).split(":")[-1]
+ async for key in cls.db().scan_iter(f"{key_prefix}*", _type="HASH")
+ )
+
@classmethod
async def get(cls, pk: Any) -> "HashModel":
- document = cls.db().hgetall(cls.make_primary_key(pk))
+ document = await cls.db().hgetall(cls.make_primary_key(pk))
if not document:
raise NotFoundError
- return cls.parse_obj(document)
+ try:
+ result = cls.parse_obj(document)
+ except TypeError as e:
+ log.warning(
+ f'Could not parse Redis response. Error was: "{e}". Probably, the '
+ "connection is not set to decode responses from bytes. "
+ "Attempting to decode response using the encoding set on "
+ f"model class ({cls.__class__}. Encoding: {cls.Meta.encoding}."
+ )
+ document = decode_redis_value(document, cls.Meta.encoding)
+ result = cls.parse_obj(document)
+ return result
@classmethod
@no_type_check
@@ -1260,6 +1349,12 @@ class HashModel(RedisModel, abc.ABC):
schema_parts = [schema_prefix] + cls.schema_for_fields()
return " ".join(schema_parts)
+ async def update(self, **field_values):
+ validate_model_fields(self.__class__, field_values)
+ for field, value in field_values.items():
+ setattr(self, field, value)
+ await self.save()
+
@classmethod
def schema_for_fields(cls):
schema_parts = []
@@ -1342,10 +1437,16 @@ class HashModel(RedisModel, abc.ABC):
class JsonModel(RedisModel, abc.ABC):
def __init_subclass__(cls, **kwargs):
+ if not has_redis_json(cls.db()):
+ log.error(
+ "Your Redis instance does not have the RedisJson module "
+ "loaded. JsonModel depends on RedisJson."
+ )
# Generate the RediSearch schema once to validate fields.
cls.redisearch_schema()
async def save(self, pipeline: Optional[Pipeline] = None) -> "JsonModel":
+ self.check()
if pipeline is None:
db = self.db()
else:
@@ -1357,7 +1458,25 @@ class JsonModel(RedisModel, abc.ABC):
async def update(self, **field_values):
validate_model_fields(self.__class__, field_values)
for field, value in field_values.items():
- setattr(self, field, value)
+ # Handle the simple update case first, e.g. city="Happy Valley"
+ if "__" not in field:
+ setattr(self, field, value)
+ continue
+
+ # Handle the nested update field name case, e.g. address__city="Happy Valley"
+ obj = self
+ parts = field.split("__")
+ path_to_field = parts[:-1]
+ target_field = parts[-1]
+
+ # Get the final object in a nested update field name, e.g. for
+ # the string address__city, we want to get self.address.city
+ for sub_field in path_to_field:
+ obj = getattr(obj, sub_field)
+
+ # Set the target field (the last "part" of the nested update
+ # field name) to the target value.
+ setattr(obj, target_field, value)
await self.save()
@classmethod
diff --git a/redis_om/model/query_resolver.py b/aredis_om/model/query_resolver.py
similarity index 94%
rename from redis_om/model/query_resolver.py
rename to aredis_om/model/query_resolver.py
index f27fc36..3657970 100644
--- a/redis_om/model/query_resolver.py
+++ b/aredis_om/model/query_resolver.py
@@ -1,7 +1,6 @@
-from collections import Sequence
-from typing import Any, Dict, List, Mapping, Union
+from typing import List, Mapping
-from redis_om.model.model import Expression
+from aredis_om.model.model import Expression
class LogicalOperatorForListOfExpressions(Expression):
diff --git a/redis_om/model/render_tree.py b/aredis_om/model/render_tree.py
similarity index 100%
rename from redis_om/model/render_tree.py
rename to aredis_om/model/render_tree.py
diff --git a/redis_om/model/token_escaper.py b/aredis_om/model/token_escaper.py
similarity index 100%
rename from redis_om/model/token_escaper.py
rename to aredis_om/model/token_escaper.py
diff --git a/redis_om/unasync_util.py b/aredis_om/unasync_util.py
similarity index 98%
rename from redis_om/unasync_util.py
rename to aredis_om/unasync_util.py
index 093dcb3..3bea28f 100644
--- a/redis_om/unasync_util.py
+++ b/aredis_om/unasync_util.py
@@ -2,6 +2,7 @@
import inspect
+
_original_next = next
@@ -37,4 +38,4 @@ next = _original_next
def return_non_coro(x):
- return x
\ No newline at end of file
+ return x
diff --git a/build.py b/build.py
deleted file mode 100644
index 12a8f25..0000000
--- a/build.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import unasync
-
-
-def build(setup_kwargs):
- setup_kwargs.update(
- {"cmdclass": {'build_py': unasync.cmdclass_build_py(rules=[
- unasync.Rule("/aredis_om/", "/redis_om/"),
- unasync.Rule("/aredis_om/tests/", "/redis_om/tests/", additional_replacements={"aredis_om": "redis_om"}),
- ])}}
- )
diff --git a/docker-compose.yml b/docker-compose.yml
index 87e406a..f333d22 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -9,3 +9,11 @@ services:
- "6380:6379"
volumes:
- ./data:/data
+
+ oss_redis:
+ image: "redis:latest"
+ restart: always
+ ports:
+ - "6381:6379"
+ volumes:
+ - ./oss_data:/oss_data
\ No newline at end of file
diff --git a/docs/connections.md b/docs/connections.md
new file mode 100644
index 0000000..10eff89
--- /dev/null
+++ b/docs/connections.md
@@ -0,0 +1,3 @@
+# Managing Connections
+
+WIP!
\ No newline at end of file
diff --git a/docs/embedded_models.md b/docs/embedded_models.md
new file mode 100644
index 0000000..d08b7ba
--- /dev/null
+++ b/docs/embedded_models.md
@@ -0,0 +1,54 @@
+# Embedded Models
+
+**NOTE:** This documentation is a stub, using the same embedded JSON model example as the README.
+
+Redis OM can store and query **nested models** like any document database, with the speed and power you get from Redis. Let's see how this works.
+
+In the next example, we'll define a new `Address` model and embed it within the `Customer` model.
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import (
+ EmbeddedJsonModel,
+ JsonModel,
+ Field,
+ Migrator
+)
+
+
+class Address(EmbeddedJsonModel):
+ address_line_1: str
+ address_line_2: Optional[str]
+ city: str = Field(index=True)
+ state: str = Field(index=True)
+ country: str
+ postal_code: str = Field(index=True)
+
+
+class Customer(JsonModel):
+ first_name: str = Field(index=True)
+ last_name: str = Field(index=True)
+ email: str = Field(index=True)
+ join_date: datetime.date
+ age: int = Field(index=True)
+ bio: Optional[str] = Field(index=True, full_text_search=True,
+ default="")
+
+ # Creates an embedded model.
+ address: Address
+
+
+# With these two models and a Redis deployment with the RedisJSON
+# module installed, we can run queries like the following.
+
+# Before running queries, we need to run migrations to set up the
+# indexes that Redis OM will use. You can also use the `migrate`
+# CLI tool for this!
+Migrator().run()
+
+# Find all customers who live in San Antonio, TX
+Customer.find(Customer.address.city == "San Antonio",
+ Customer.address.state == "TX")
+```
\ No newline at end of file
diff --git a/docs/faq.md b/docs/faq.md
index e69de29..78203b2 100644
--- a/docs/faq.md
+++ b/docs/faq.md
@@ -0,0 +1,3 @@
+# Frequently Asked Questions (FAQ)
+
+WIP!
\ No newline at end of file
diff --git a/docs/fastapi_integration.md b/docs/fastapi_integration.md
new file mode 100644
index 0000000..5a8d771
--- /dev/null
+++ b/docs/fastapi_integration.md
@@ -0,0 +1,134 @@
+# FastAPI Integration
+
+## Introduction
+
+This section includes a complete example showing how to integrate Redis OM with FastAPI.
+
+Good news: Redis OM was **specifically designed to integrate with FastAPI**!
+
+## Concepts
+
+### Every Redis OM Model is also a Pydantic model
+
+Every Redis OM model is also a Pydantic model, so you can define a model and then use the model class anywhere that FastAPI expects a Pydantic model.
+
+This means a couple of things:
+
+1. A Redis OM model can be used for request body validation
+2. Redis OM models show up in the auto-generated API documentation
+
+### Cache vs. Data
+
+Redis works well as either a durable data store or a cache, but the optimal Redis configuration is often different between these two use cases.
+
+You almost always want to use a Redis instance tuned for caching when you're caching and a separate Redis instance tuned for data durability for storing application state.
+
+This example shows how to manage these two uses of Redis within the same application. The app uses a FastAPI caching framework and dedicated caching instance of Redis for caching, and a separate Redis instance tuned for durability for Redis OM models.
+
+
+## Example app code
+
+This is a complete example that you can run as-is:
+
+```python
+import datetime
+from typing import Optional
+
+import aioredis
+
+from fastapi import FastAPI, HTTPException
+from starlette.requests import Request
+from starlette.responses import Response
+
+from fastapi_cache import FastAPICache
+from fastapi_cache.backends.redis import RedisBackend
+from fastapi_cache.decorator import cache
+
+from pydantic import EmailStr
+
+from redis_om import HashModel, NotFoundError
+from redis_om import get_redis_connection
+
+# This Redis instance is tuned for durability.
+REDIS_DATA_URL = "redis://localhost:6380"
+
+# This Redis instance is tuned for cache performance.
+REDIS_CACHE_URL = "redis://localhost:6381"
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: EmailStr
+ join_date: datetime.date
+ age: int
+ bio: Optional[str]
+
+
+app = FastAPI()
+
+
+@app.post("/customer")
+async def save_customer(customer: Customer):
+ # We can save the model to Redis by calling `save()`:
+ return customer.save()
+
+
+@app.get("/customers")
+async def list_customers(request: Request, response: Response):
+ # Get the primary keys of all customers with `Customer.all_pks()`:
+ return {"customers": Customer.all_pks()}
+
+
+@app.get("/customer/{pk}")
+@cache(expire=10)
+async def get_customer(pk: str, request: Request, response: Response):
+ # To retrieve this customer with its primary key, we use `Customer.get()`:
+ try:
+ return Customer.get(pk)
+ except NotFoundError:
+ raise HTTPException(status_code=404, detail="Customer not found")
+
+
+@app.on_event("startup")
+async def startup():
+ r = aioredis.from_url(REDIS_CACHE_URL, encoding="utf8",
+ decode_responses=True)
+ FastAPICache.init(RedisBackend(r), prefix="fastapi-cache")
+
+ # You can set the Redis OM URL using the REDIS_OM_URL environment
+ # variable, or by manually creating the connection using your model's
+ # Meta object.
+ Customer.Meta.database = get_redis_connection(url=REDIS_DATA_URL,
+ decode_responses=True)
+```
+
+## Testing the app
+
+You should install the app's dependencies first. This app uses Poetry, so you'll want to make sure you have that installed first:
+
+ $ pip install poetry
+
+Then install the dependencies:
+
+ $ poetry install
+
+Next, start the server:
+
+ $ poetry run uvicorn --reload main:app
+
+Then, in another shell, create a customer:
+
+ $ curl -X POST "http://localhost:8000/customer" -H 'Content-Type: application/json' \
+ -d '{"first_name":"Andrew","last_name":"Brookins","email":"a@example.com","age":"38","join_date":"2020-01-02"}'
+ {"pk":"01FM2G8EP38AVMH7PMTAJ123TA","first_name":"Andrew","last_name":"Brookins","email":"a@example.com","join_date":"2020-01-02","age":38,"bio":""}
+
+Get a copy of the value for "pk" and make another request to get that customer:
+
+ $ curl "http://localhost:8000/customer/01FM2G8EP38AVMH7PMTAJ123TA"
+ {"pk":"01FM2G8EP38AVMH7PMTAJ123TA","first_name":"Andrew","last_name":"Brookins","email":"a@example.com","join_date":"2020-01-02","age":38,"bio":""}
+
+You can also get a list of all customer PKs:
+
+ $ curl "http://localhost:8000/customers"
+ {"customers":["01FM2G8EP38AVMH7PMTAJ123TA"]}
\ No newline at end of file
diff --git a/docs/getting_started.md b/docs/getting_started.md
index e69de29..4c50e9d 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -0,0 +1,716 @@
+# Getting Started With Redis OM
+
+## Introduction
+
+This tutorial will walk you through installing Redis OM, creating your first model, and using it to save and validate data.
+
+## Prerequisites
+
+Redis OM requires Python version 3.9 or above and a Redis instance to connect to.
+
+## Python
+
+Make sure you are running **Python version 3.9 or higher**:
+
+```
+python --version
+Python 3.9.0
+```
+
+If you don't have Python installed, you can download it from [Python.org](https://www.python.org/downloads/), use [Pyenv](https://github.com/pyenv/pyenv), or install Python with your operating system's package manager.
+
+## Redis
+
+Redis OM saves data in Redis, so you will need Redis installed and running to complete this tutorial.
+
+### Downloading Redis
+
+The latest version of Redis is available from [Redis.io](https://redis.io/). You can also install Redis with your operating system's package manager.
+
+**NOTE:** This tutorial will guide you through starting Redis locally, but the instructions will also work if Redis is running on a remote server.
+
+### Installing Redis On Windows
+
+Redis doesn't run directly on Windows, but you can use Windows Subsystem for Linux (WSL) to run Redis. See [our video on YouTube](https://youtu.be/_nFwPTHOMIY) for a walk-through.
+
+Windows users can also use Docker. See the next section on running Redis with Docker for more information.
+
+### Using Redis With Docker
+
+Instead of installing Redis manually or with a package manager, you can run Redis with Docker.
+
+We recommend the [redismod](https://hub.docker.com/r/redislabs/redismod) image because it includes Redis modules that Redis OM can use to give you extra features. Later sections of this guide will provide more detail about these features.
+
+You can also use the official Redis Docker image, which is hosted on [Docker Hub](https://hub.docker.com/_/redis).
+
+**NOTE**: We'll talk about how to actually start Redis with Docker when we discuss _running_ Redis later in this guide.
+
+## Recommended: RediSearch and RedisJSON
+
+Redis OM relies on the [RediSearch][redisearch-url] and [RedisJSON][redis-json-url] Redis modules to support [rich queries](querying.md) and [embedded models](embedded_models.md).
+
+You don't need these Redis modules to use Redis OM's data modeling, validation, and persistence features, but we recommend them to get the most out of Redis OM.
+
+The easiest way to run these Redis modules during local development is to use the [redismod](https://hub.docker.com/r/redislabs/redismod) Docker image.
+
+For other installation methods, follow the "Quick Start" guides on both modules' home pages.
+
+## Starting Redis
+
+Before you get started with Redis OM, make sure you start Redis.
+
+The command to start Redis will depend on how you installed it.
+
+### Ubuntu Linux (Including WSL)
+
+If you installed Redis using `apt`, start it with the `systemctl` command:
+
+ $ sudo systemctl restart redis.service
+
+Otherwise, you can start the server manually:
+
+ $ redis-server
+
+### macOS with Homebrew
+
+ $ brew services start redis
+
+### Docker
+
+The command to start Redis with Docker depends on the image you've chosen to use.
+
+**TIP:** The `-d` option in these examples runs Redis in the background, while `-p 6379:6379` makes Redis reachable at port 6379 on your localhost.
+
+#### Docker with the `redismod` image (recommended)
+
+ $ docker run -d -p 6379:6379 redislabs/redismod
+
+### Docker with the `redis` image
+
+ $ docker run -d -p 6379:6379 redis
+
+## Installing Redis OM
+
+The recommended way to install Redis OM is with [Poetry](https://python-poetry.org/docs/). You can install Redis OM using Poetry with the following command:
+
+ $ poetry add redis-om
+
+If you're using Pipenv, the command is:
+
+ $ pipenv install redis-om
+
+Finally, you can install Redis OM with `pip` by running the following command:
+
+ $ pip install redis-om
+
+**TIP:** If you aren't using Poetry or Pipenv and are instead installing directly with `pip`, we recommend that you install Redis OM in a virtual environment (AKA, a virtualenv). If you aren't familiar with this concept, see [Dan Bader's video and transcript](https://realpython.com/lessons/creating-virtual-environment/).
+
+
+## Setting the Redis URL Environment Variable
+
+We're almost ready to create a Redis OM model! But first, we need to make sure that Redis OM knows how to connect to Redis.
+
+By default, Redis OM tries to connect to Redis on your localhost at port 6379. Most local install methods will result in Redis running at this location, in which case you don't need to do anything special.
+
+However, if you configured Redis to run on a different port, or if you're using a remote Redis server, you'll need to set the `REDIS_OM_URL` environment variable.
+
+The `REDIS_OM_URL` environment variable follows the redis-py URL format:
+
+ redis://[[username]:[password]]@localhost:6379/[database number]
+
+The default connection is equivalent to the following `REDIS_OM_URL` environment variable:
+
+ redis://@localhost:6379
+
+**TIP:** Redis databases are numbered, and the default is 0. You can leave off the database number to use the default database.
+
+Other supported prefixes include "rediss" for SSL connections and "unix" for Unix domain sockets:
+
+ rediss://[[username]:[password]]@localhost:6379/0
+ unix://[[username]:[password]]@/path/to/socket.sock?db=0
+
+For more details about how to connect to Redis with Redis OM, see the [connections documentation](connections.md).
+
+### Redis Cluster Support
+
+Redis OM supports connecting to Redis Cluster, but this preview release does not support doing so with the `REDIS_OM_URL` environment variable. However, you can connect by manually creating a connection object.
+
+See the [connections documentation](connections.md) for examples of connecting to Redis Cluster.
+
+Support for connecting to Redis Cluster via `REDIS_OM_URL` will be added in a future release.
+
+## Defining a Model
+
+In this tutorial, we'll create a `Customer` model that validates and saves data. Let's start with a basic definition of the model. We'll add features as we go along.
+
+```python
+import datetime
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: str
+```
+
+There are a few details to note:
+
+1. Our `Customer` model extends the `HashModel` class. This means that it will be saved to Redis as a hash. The other model class that Redis OM provides is `JsonModel`, which we'll discuss later.
+2. We've specified the model's fields using Python type annotations.
+
+Let's dig into these two details a bit more.
+
+### The HashModel Class
+
+When you subclass `HashModel`, your subclass is both a Redis OM model, with methods for saving data to Redis, *and* a Pydantic model.
+
+This means that you can use Pydantic field validations with your Redis OM models, which we'll cover later, when we talk about validation. But this also means you can use Redis OM models anywhere you would use a Pydantic model, like in your FastAPI applications. 🤯
+
+### Type Annotations
+
+The type annotations you add to your model fields are used for a few purposes:
+
+* Validating data with Pydantic validators
+* Serializing data to Redis
+* Deserializing data from Redis
+
+We'll see examples of these throughout the course of this tutorial.
+
+An important detail about the `HashModel` class is that it does not support `list`, `set`, or mapping (like `dict`) types. This is because Redis hashes cannot contain lists, sets, or other hashes.
+
+If you want to model fields with a list, set, or mapping type, or another model, you'll need to use the `JsonModel` class, which can support these types, as well as embedded models.
+
+## Creating Models
+
+Let's see what creating a model object looks like:
+
+```python
+import datetime
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: str
+
+
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38,
+ bio="Python developer, works at Redis, Inc."
+)
+```
+
+### Optional Fields
+
+What would happen if we left out one of these fields, like `bio`?
+
+```python
+import datetime
+
+from redis_om import HashModel
+from pydantic import ValidationError
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: str
+
+
+# All fields are required because none of the fields
+# are marked `Optional`, so we get a validation error:
+try:
+ Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38 # <- We didn't pass in a bio!
+ )
+except ValidationError as e:
+ print(e)
+ """
+ ValidationError: 1 validation error for Customer
+ bio
+ field required (type=value_error.missing)
+ """
+```
+
+If we want the `bio` field to be optional, we need to change the type annotation to use `Optional`.
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: Optional[str] # <- Now, bio is an Optional[str]
+```
+
+Now we can create `Customer` objects with or without the `bio` field.
+
+### Default Values
+
+Fields can have default values. You set them by assigning a value to a field.
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: Optional[str] = "Super dope" # <- We added a default here
+```
+
+Now, if we create a `Customer` object without a `bio` field, it will use the default value.
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: Optional[str] = "Super dope"
+
+
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38) # <- Notice, we didn't give a bio!
+
+print(andrew.bio) # <- So we got the default value.
+# > 'Super dope'
+```
+
+The model will then save this default value to Redis the next time you call `save()`.
+
+### Automatic Primary Keys
+
+Models generate a globally unique primary key automatically without needing to talk to Redis.
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: Optional[str] = "Super dope"
+
+
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38)
+
+print(andrew.pk)
+# > '01FJM6PH661HCNNRC884H6K30C'
+```
+
+The ID is available *before* you save the model.
+
+The default ID generation function creates [ULIDs](https://github.com/ulid/spec), though you can change the function that generates the primary key for models if you'd like to use a different kind of primary key.
+
+## Validating Data
+
+Redis OM uses [Pydantic][pydantic-url] to validate data based on the type annotations you assign to fields in a model class.
+
+This validation ensures that fields like `first_name`, which the `Customer` model marked as a `str`, are always strings. **But every Redis OM model is also a Pydantic model**, so you can use Pydantic validators like `EmailStr`, `Pattern`, and many more for complex validations!
+
+For example, we defined the `join_date` for our `Customer` model earlier as a `datetime.date`. So, if we try to create a model with a `join_date` that isn't a date, we'll get a validation error.
+
+Let's try it now:
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import HashModel
+from pydantic import ValidationError
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: Optional[str] = "Super dope"
+
+
+try:
+ Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date="not a date!", # <- The problem line!
+ age=38
+ )
+except ValidationError as e:
+ print(e)
+ """
+ pydantic.error_wrappers.ValidationError: 1 validation error for Customer
+ join_date
+ invalid date format (type=value_error.date)
+ """
+```
+
+### Models Coerce Values By Default
+
+You might wonder what qualifies as a "date" in our last validation example. By default, Redis OM will try to coerce input values to the correct type. That means we can pass a date string for `join_date` instead of a `date` object:
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+
+
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date="2020-01-02", # <- We're passing a YYYY-MM-DD date string now
+ age=38
+)
+
+print(andrew.join_date)
+# > 2020-01-02
+type(andrew.join_date)
+# > datetime.date # The model parsed the string automatically!
+```
+
+This ability to combine parsing (in this case, a YYYY-MM-DD date string) with validation can save you a lot of work.
+
+However, you can turn off coercion -- check the next section on using strict validation.
+
+### Strict Validation
+
+You can turn on strict validation to reject values for a field unless they match the exact type of the model's type annotations.
+
+You do this by changing a field's type annotation to use one of the ["strict" types provided by Pydantic](https://pydantic-docs.helpmanual.io/usage/types/#strict-types).
+
+Redis OM supports all of Pydantic's strict types: `StrictStr`, `StrictBytes`, `StrictInt`, `StrictFloat`, and `StrictBool`.
+
+If we wanted to make sure that the `age` field only accepts integers and doesn't try to parse a string containing an integer, like "1", we'd use the `StrictInt` class.
+
+```python
+import datetime
+from typing import Optional
+
+from pydantic import StrictInt, ValidationError
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: StrictInt # <- Instead of int, we use StrictInt
+ bio: Optional[str]
+
+
+# Now if we use a string instead of an integer for `age`,
+# we get a validation error:
+try:
+ Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+        join_date="2020-01-02",
+        age="38"  # <- Passing a string for a StrictInt shouldn't work!
+ )
+except ValidationError as e:
+ print(e)
+ """
+ pydantic.error_wrappers.ValidationError: 1 validation error for Customer
+    age
+    value is not a valid integer (type=type_error.integer)
+ """
+```
+
+Pydantic doesn't include a `StrictDate` class, but we can create our own. In this example, we create a `StrictDate` type that we'll use to validate that `join_date` is a `datetime.date` object.
+
+```python
+import datetime
+from typing import Optional
+
+from pydantic import ValidationError
+from redis_om import HashModel
+
+
+class StrictDate(datetime.date):
+ @classmethod
+ def __get_validators__(cls) -> 'CallableGenerator':
+ yield cls.validate
+
+ @classmethod
+ def validate(cls, value: datetime.date, **kwargs) -> datetime.date:
+ if not isinstance(value, datetime.date):
+ raise ValueError("Value must be a datetime.date object")
+ return value
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: StrictDate
+ age: int
+ bio: Optional[str]
+
+
+# Now if we use a string instead of a date object for `join_date`,
+# we get a validation error:
+try:
+ Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date="2020-01-02", # <- A string shouldn't work now!
+ age="38"
+ )
+except ValidationError as e:
+ print(e)
+ """
+ pydantic.error_wrappers.ValidationError: 1 validation error for Customer
+ join_date
+ Value must be a datetime.date object (type=value_error)
+ """
+```
+
+## Saving Models
+
+We can save the model to Redis by calling `save()`:
+
+```python
+import datetime
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+
+
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38)
+
+andrew.save()
+```
+
+## Examining Your Data In Redis
+
+You can view the data stored in Redis for any Redis OM model.
+
+First, get the key of a model instance you want to inspect. The `key()` method will give you the exact Redis key used to store the model.
+
+**NOTE:** The naming of this method may be confusing. This is not the primary key, but is instead the Redis key for this model. For this reason, the method name may change.
+
+In this example, we're looking at the key created for the `Customer` model we've been building:
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: Optional[str] = "Super dope"
+
+
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38)
+
+andrew.save()
+andrew.key()
+# > 'mymodel.Customer:01FKGX1DFEV9Z2XKF59WQ6DC9T'
+```
+
+With the model's Redis key, you can start `redis-cli` and inspect the data stored under that key. Here, we run `JSON.GET` command with `redis-cli` using the running "redis" container that this project's Docker Compose file defines:
+
+```
+$ docker-compose exec -T redis redis-cli HGETALL mymodel.Customer:01FKGX1DFEV9Z2XKF59WQ6DC9T
+
+ 1) "pk"
+ 2) "01FKGX1DFEV9Z2XKF59WQ6DC9T"
+ 3) "first_name"
+ 4) "Andrew"
+ 5) "last_name"
+ 6) "Brookins"
+ 7) "email"
+ 8) "andrew.brookins@example.com"
+ 9) "join_date"
+10) "2021-11-02"
+11) "age"
+12) "38"
+13) "bio"
+14) "Super dope"
+```
+
+## Getting a Model
+
+If you have the primary key of a model, you can call the `get()` method on the model class to get the model's data.
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import HashModel
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+ bio: Optional[str] = "Super dope"
+
+
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38)
+
+andrew.save()
+
+assert Customer.get(andrew.pk) == andrew
+```
+
+## Querying for Models With Expressions
+
+Redis OM comes with a rich query language that allows you to query Redis with Python expressions.
+
+To show how this works, we'll make a small change to the `Customer` model we defined earlier. We'll add `Field(index=True)` to tell Redis OM that we want to index the `last_name` and `age` fields:
+
+```python
+import datetime
+from typing import Optional
+
+from pydantic import EmailStr
+
+from redis_om import (
+ Field,
+ HashModel,
+ Migrator
+)
+
+
+class Customer(HashModel):
+ first_name: str
+ last_name: str = Field(index=True)
+ email: EmailStr
+ join_date: datetime.date
+ age: int = Field(index=True)
+ bio: Optional[str]
+
+
+# Now, if we use this model with a Redis deployment that has the
+# RediSearch module installed, we can run queries like the following.
+
+# Before running queries, we need to run migrations to set up the
+# indexes that Redis OM will use. You can also use the `migrate`
+# CLI tool for this!
+Migrator().run()
+
+# Find all customers with the last name "Brookins"
+Customer.find(Customer.last_name == "Brookins").all()
+
+# Find all customers that do NOT have the last name "Brookins"
+Customer.find(Customer.last_name != "Brookins").all()
+
+# Find all customers whose last name is "Brookins" OR whose age is
+# 100 AND whose last name is "Smith"
+Customer.find((Customer.last_name == "Brookins") | (
+ Customer.age == 100
+) & (Customer.last_name == "Smith")).all()
+```
+
+Many more types of queries are possible. To learn more about querying with Redis OM, see the [documentation on querying](docs/querying.md).
+
+## Next Steps
+
+Now that you know the basics of working with Redis OM, continue on for all the nitty-gritty details about [models and fields](models_and_fields.md).
+
+
+
+[redisearch-url]: https://oss.redis.com/redisearch/
+[redis-json-url]: https://oss.redis.com/redisjson/
diff --git a/docs/index.md b/docs/index.md
index e69de29..20b4e10 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -0,0 +1,3 @@
+# Redis OM Documentation
+
+WIP!
\ No newline at end of file
diff --git a/docs/integrating.md b/docs/integrating.md
index e69de29..404c079 100644
--- a/docs/integrating.md
+++ b/docs/integrating.md
@@ -0,0 +1,3 @@
+# Integrating Redis OM With Popular Frameworks
+
+WIP!
\ No newline at end of file
diff --git a/docs/models_and_fields.md b/docs/models_and_fields.md
new file mode 100644
index 0000000..0c082ba
--- /dev/null
+++ b/docs/models_and_fields.md
@@ -0,0 +1,31 @@
+# Models and Fields
+
+**NOTE:** This documentation is a stub. Documentation for this project is a work in progress!
+
+## Introduction
+
+## Saving Data As Hashes With HashModel
+
+### What Does Redis Store?
+
+## Saving Data With JSON With JsonModel
+
+### What Does Redis Store?
+
+## Primary Keys
+
+### Why Primary Keys Matter to Redis OM
+
+### Using the Default Primary Key
+
+### Using a Custom Primary Key
+
+## Meta Classes
+
+## Subclassing Models
+
+### Subclassing and Meta Objects
+
+## Saving Models
+
+
diff --git a/docs/querying.md b/docs/querying.md
new file mode 100644
index 0000000..2a11e65
--- /dev/null
+++ b/docs/querying.md
@@ -0,0 +1,67 @@
+# Querying
+
+**NOTE:** This documentation is a stub that uses examples from other documentation in this project (the README, the Getting Started guide, etc.). Detailed documentation on querying is a work in progress.
+
+Querying in Redis OM uses a rich expression syntax inspired by the Django ORM, SQLAlchemy, and Peewee.
+
+In the following example, we define `Address` and `Customer` models for use with a Redis database that has the [RedisJSON](redis-json-url) module installed.
+
+With these two classes defined, we can query on any indexed fields in the models -- including indexed fields within embedded models.
+
+```python
+import datetime
+from typing import Optional
+
+from redis_om import (
+ EmbeddedJsonModel,
+ JsonModel,
+ Field,
+ Migrator
+)
+
+
+class Address(EmbeddedJsonModel):
+ address_line_1: str
+ address_line_2: Optional[str]
+ city: str = Field(index=True)
+ state: str = Field(index=True)
+ country: str
+ postal_code: str = Field(index=True)
+
+
+class Customer(JsonModel):
+ first_name: str = Field(index=True)
+ last_name: str = Field(index=True)
+ email: str = Field(index=True)
+ join_date: datetime.date
+ age: int = Field(index=True)
+ bio: Optional[str] = Field(index=True, full_text_search=True,
+ default="")
+
+ # Creates an embedded model.
+ address: Address
+
+
+# Before running queries, we need to run migrations to set up the
+# indexes that Redis OM will use. You can also use the `migrate`
+# CLI tool for this!
+Migrator().run()
+
+# Here are a few example queries that use these two models...
+
+# Find all customers with the last name "Brookins"
+Customer.find(Customer.last_name == "Brookins").all()
+
+# Find all customers that do NOT have the last name "Brookins"
+Customer.find(Customer.last_name != "Brookins").all()
+
+# Find all customers whose last name is "Brookins" OR whose age is
+# 100 AND whose last name is "Smith"
+Customer.find((Customer.last_name == "Brookins") | (
+ Customer.age == 100
+) & (Customer.last_name == "Smith")).all()
+
+# Find all customers who live in San Antonio, TX
+Customer.find(Customer.address.city == "San Antonio",
+ Customer.address.state == "TX")
+```
\ No newline at end of file
diff --git a/docs/redis_modules.md b/docs/redis_modules.md
new file mode 100644
index 0000000..db60c4e
--- /dev/null
+++ b/docs/redis_modules.md
@@ -0,0 +1,30 @@
+# Redis Modules
+
+Some advanced features of Redis OM, like rich query expressions and saving data as JSON, rely on core features from two source-available Redis modules: **RediSearch** and **RedisJSON**.
+
+These modules are the "magic" behind the scenes:
+
+* RediSearch adds querying, indexing, and full-text search to Redis
+* RedisJSON adds the JSON data type to Redis
+
+## Why this is important
+
+Without RediSearch or RedisJSON installed, you can still use Redis OM to create declarative models backed by Redis.
+
+We'll store your model data in Redis as Hashes, and you can retrieve models using their primary keys. You'll also get all the validation features from Pydantic.
+
+So, what won't work without these modules?
+
+1. Without RedisJSON, you won't be able to nest models inside each other, like we did with the example model of a `Customer` model that has an `Address` embedded inside it.
+2. Without RediSearch, you won't be able to use our expressive queries to find models -- just primary keys.
+
+## So how do you get RediSearch and RedisJSON?
+
+You can use RediSearch and RedisJSON with your self-hosted Redis deployment. Just follow the instructions on installing the binary versions of the modules in their Quick Start Guides:
+
+- [RedisJSON Quick Start - Running Binaries](https://oss.redis.com/redisjson/#download-and-running-binaries)
+- [RediSearch Quick Start - Running Binaries](https://oss.redis.com/redisearch/Quick_Start/#download_and_running_binaries)
+
+**NOTE**: Both of these modules' Quick Start Guides also have instructions on how to run the modules in Redis with Docker.
+
+Don't want to run Redis yourself? RediSearch and RedisJSON are also available on Redis Cloud. [Get started here.](https://redis.com/try-free/)
\ No newline at end of file
diff --git a/docs/testing.md b/docs/testing.md
new file mode 100644
index 0000000..7cb7055
--- /dev/null
+++ b/docs/testing.md
@@ -0,0 +1,5 @@
+# Testing Your Models
+
+**NOTE:** This documentation is a Work in Progress.
+
+Writing tests that use a Redis OM model requires some setup. For now, review the tests in the redis-om-python project for examples.
diff --git a/docs/validation.md b/docs/validation.md
new file mode 100644
index 0000000..9b165dd
--- /dev/null
+++ b/docs/validation.md
@@ -0,0 +1,73 @@
+# Validation
+
+Redis OM uses [Pydantic][pydantic-url] behind the scenes to validate data at runtime, based on the model's type annotations.
+
+## Basic Type Validation
+
+Validation works for basic type annotations like `str`. Thus, given the following model:
+
+```python
+class Customer(HashModel):
+ first_name: str
+ last_name: str
+ email: EmailStr
+ join_date: datetime.date
+ age: int
+ bio: Optional[str]
+```
+
+... Redis OM will ensure that `first_name` is always a string.
+
+But every Redis OM model is also a Pydantic model, so you can use existing Pydantic validators like `EmailStr`, `Pattern`, and many more for complex validation!
+
+## Complex Validation
+
+Let's see what happens if we try to create a `Customer` object with an invalid email address.
+
+```python
+# We'll get a validation error if we try to use an invalid email address!
+Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="Not an email address!",
+ join_date=datetime.date.today(),
+ age=38,
+ bio="Python developer, works at Redis, Inc."
+)
+```
+
+This code generates the following error:
+
+```
+ Traceback:
+ pydantic.error_wrappers.ValidationError: 1 validation error for Customer
+ email
+ value is not a valid email address (type=value_error.email)
+```
+
+We'll also get a validation error if we change a field on a model instance to an invalid value and then try to save it:
+
+```python
+andrew = Customer(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew.brookins@example.com",
+ join_date=datetime.date.today(),
+ age=38,
+ bio="Python developer, works at Redis, Inc."
+)
+
+andrew.email = "Not valid"
+andrew.save()
+```
+
+Once again, we get the validation error:
+
+```
+ Traceback:
+ pydantic.error_wrappers.ValidationError: 1 validation error for Customer
+ email
+ value is not a valid email address (type=value_error.email)
+```
+
+[pydantic-url]: https://github.com/samuelcolvin/pydantic
diff --git a/images/logo.svg b/images/logo.svg
new file mode 100644
index 0000000..1c70d4f
--- /dev/null
+++ b/images/logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/make_sync.py b/make_sync.py
new file mode 100644
index 0000000..79909d4
--- /dev/null
+++ b/make_sync.py
@@ -0,0 +1,32 @@
+import os
+from pathlib import Path
+
+import unasync
+
+
+def main():
+ additional_replacements = {
+ "aredis_om": "redis_om",
+ "aioredis": "redis"
+ }
+ rules = [
+ unasync.Rule(
+ fromdir="/aredis_om/",
+ todir="/redis_om/",
+ additional_replacements=additional_replacements,
+ ),
+ ]
+
+ filepaths = []
+ for root, _, filenames in os.walk(
+ Path(__file__).absolute().parent / "aredis_om"
+ ):
+ for filename in filenames:
+ if filename.rpartition(".")[-1] in ("py", "pyi",):
+ filepaths.append(os.path.join(root, filename))
+
+ unasync.unasync_files(filepaths, rules)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/poetry.lock b/poetry.lock
index c93d667..a7a19f4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -23,7 +23,7 @@ python-versions = "*"
[[package]]
name = "astroid"
-version = "2.8.3"
+version = "2.8.4"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
@@ -31,16 +31,20 @@ python-versions = "~=3.6"
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
+typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
wrapt = ">=1.11,<1.14"
[[package]]
name = "async-timeout"
-version = "3.0.1"
+version = "4.0.0"
description = "Timeout context manager for asyncio programs"
category = "main"
optional = false
-python-versions = ">=3.5.3"
+python-versions = ">=3.6"
+
+[package.dependencies]
+typing-extensions = ">=3.6.5"
[[package]]
name = "atomicwrites"
@@ -72,6 +76,21 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "backports.entry-points-selectable"
+version = "1.1.1"
+description = "Compatibility shim providing selectable entry points for older implementations"
+category = "dev"
+optional = false
+python-versions = ">=2.7"
+
+[package.dependencies]
+importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+testing = ["pytest", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"]
+
[[package]]
name = "bandit"
version = "1.7.0"
@@ -89,7 +108,7 @@ stevedore = ">=1.20.0"
[[package]]
name = "black"
-version = "21.9b0"
+version = "21.10b0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
@@ -102,6 +121,7 @@ pathspec = ">=0.9.0,<1"
platformdirs = ">=2"
regex = ">=2020.1.8"
tomli = ">=0.2.6,<2.0.0"
+typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""}
typing-extensions = [
{version = ">=3.10.0.0", markers = "python_version < \"3.10\""},
{version = "!=3.10.0.1", markers = "python_version >= \"3.10\""},
@@ -109,11 +129,54 @@ typing-extensions = [
[package.extras]
colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"]
+d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-python2 = ["typed-ast (>=1.4.2)"]
+python2 = ["typed-ast (>=1.4.3)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "bleach"
+version = "4.1.0"
+description = "An easy safelist-based HTML-sanitizing tool."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+packaging = "*"
+six = ">=1.9.0"
+webencodings = "*"
+
+[[package]]
+name = "certifi"
+version = "2021.10.8"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "cffi"
+version = "1.15.0"
+description = "Foreign Function Interface for Python calling C code."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+pycparser = "*"
+
+[[package]]
+name = "charset-normalizer"
+version = "2.0.7"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+category = "dev"
+optional = false
+python-versions = ">=3.5.0"
+
+[package.extras]
+unicode_backport = ["unicodedata2"]
+
[[package]]
name = "click"
version = "8.0.3"
@@ -124,6 +187,7 @@ python-versions = ">=3.6"
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
+importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
[[package]]
name = "colorama"
@@ -135,7 +199,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "coverage"
-version = "6.0.2"
+version = "6.1.1"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
@@ -147,6 +211,25 @@ tomli = {version = "*", optional = true, markers = "extra == \"toml\""}
[package.extras]
toml = ["tomli"]
+[[package]]
+name = "cryptography"
+version = "35.0.0"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+cffi = ">=1.12"
+
+[package.extras]
+docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
+docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
+pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
+sdist = ["setuptools_rust (>=0.11.4)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
+
[[package]]
name = "decorator"
version = "5.1.0"
@@ -155,6 +238,49 @@ category = "dev"
optional = false
python-versions = ">=3.5"
+[[package]]
+name = "distlib"
+version = "0.3.3"
+description = "Distribution utilities"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "dnspython"
+version = "2.1.0"
+description = "DNS toolkit"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+dnssec = ["cryptography (>=2.6)"]
+doh = ["requests", "requests-toolbelt"]
+idna = ["idna (>=2.1)"]
+curio = ["curio (>=1.2)", "sniffio (>=1.1)"]
+trio = ["trio (>=0.14.0)", "sniffio (>=1.1)"]
+
+[[package]]
+name = "docutils"
+version = "0.18"
+description = "Docutils -- Python Documentation Utilities"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+
+[[package]]
+name = "email-validator"
+version = "1.1.3"
+description = "A robust email syntax and deliverability validation library for Python 2.x/3.x."
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[package.dependencies]
+dnspython = ">=1.15.0"
+idna = ">=2.0.0"
+
[[package]]
name = "execnet"
version = "1.9.0"
@@ -166,6 +292,18 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
testing = ["pre-commit"]
+[[package]]
+name = "filelock"
+version = "3.3.2"
+description = "A platform independent file lock."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
+testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
+
[[package]]
name = "flake8"
version = "4.0.1"
@@ -175,20 +313,21 @@ optional = false
python-versions = ">=3.6"
[package.dependencies]
+importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""}
mccabe = ">=0.6.0,<0.7.0"
pycodestyle = ">=2.8.0,<2.9.0"
pyflakes = ">=2.4.0,<2.5.0"
[[package]]
name = "gitdb"
-version = "4.0.7"
+version = "4.0.9"
description = "Git Object Database"
category = "dev"
optional = false
-python-versions = ">=3.4"
+python-versions = ">=3.6"
[package.dependencies]
-smmap = ">=3.0.1,<5"
+smmap = ">=3.0.1,<6"
[[package]]
name = "gitpython"
@@ -202,6 +341,30 @@ python-versions = ">=3.7"
gitdb = ">=4.0.1,<5"
typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""}
+[[package]]
+name = "idna"
+version = "3.3"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+
+[[package]]
+name = "importlib-metadata"
+version = "4.2.0"
+description = "Read metadata from Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
+
[[package]]
name = "iniconfig"
version = "1.1.1"
@@ -225,7 +388,7 @@ toml = {version = ">=0.10.2", markers = "python_version > \"3.6\""}
[[package]]
name = "ipython"
-version = "7.28.0"
+version = "7.29.0"
description = "IPython: Productive Interactive Computing"
category = "dev"
optional = false
@@ -257,7 +420,7 @@ test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipyk
[[package]]
name = "isort"
-version = "5.9.3"
+version = "5.10.1"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
@@ -284,6 +447,36 @@ parso = ">=0.8.0,<0.9.0"
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"]
+[[package]]
+name = "jeepney"
+version = "0.7.1"
+description = "Low-level, pure Python DBus protocol wrapper."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"]
+trio = ["trio", "async-generator"]
+
+[[package]]
+name = "keyring"
+version = "23.2.1"
+description = "Store and access your passwords safely."
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+importlib-metadata = ">=3.6"
+jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""}
+pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_platform == \"win32\""}
+SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""}
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"]
+
[[package]]
name = "lazy-object-proxy"
version = "1.6.0"
@@ -322,6 +515,7 @@ python-versions = ">=3.5"
[package.dependencies]
mypy-extensions = ">=0.4.3,<0.5.0"
toml = "*"
+typed-ast = {version = ">=1.4.0,<1.5.0", markers = "python_version < \"3.8\""}
typing-extensions = ">=3.7.4"
[package.extras]
@@ -338,14 +532,14 @@ python-versions = "*"
[[package]]
name = "packaging"
-version = "21.0"
+version = "21.2"
description = "Core utilities for Python packages"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
-pyparsing = ">=2.0.2"
+pyparsing = ">=2.0.2,<3"
[[package]]
name = "parso"
@@ -369,7 +563,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "pbr"
-version = "5.6.0"
+version = "5.7.0"
description = "Python Build Reasonableness"
category = "dev"
optional = false
@@ -394,6 +588,17 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "pkginfo"
+version = "1.7.1"
+description = "Query metadatdata from sdists / bdists / installed packages."
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.extras]
+testing = ["nose", "coverage"]
+
[[package]]
name = "platformdirs"
version = "2.4.0"
@@ -414,6 +619,9 @@ category = "dev"
optional = false
python-versions = ">=3.6"
+[package.dependencies]
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
@@ -428,7 +636,7 @@ python-versions = "*"
[[package]]
name = "prompt-toolkit"
-version = "3.0.20"
+version = "3.0.22"
description = "Library for building powerful interactive command lines in Python"
category = "dev"
optional = false
@@ -447,11 +655,11 @@ python-versions = "*"
[[package]]
name = "py"
-version = "1.10.0"
+version = "1.11.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]]
name = "pycodestyle"
@@ -461,6 +669,14 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
[[package]]
name = "pydantic"
version = "1.8.2"
@@ -529,6 +745,7 @@ python-versions = ">=3.6"
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
@@ -616,6 +833,14 @@ category = "main"
optional = false
python-versions = "*"
+[[package]]
+name = "pywin32-ctypes"
+version = "0.2.0"
+description = ""
+category = "dev"
+optional = false
+python-versions = "*"
+
[[package]]
name = "pyyaml"
version = "6.0"
@@ -624,6 +849,22 @@ category = "dev"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "readme-renderer"
+version = "30.0"
+description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+bleach = ">=2.1.0"
+docutils = ">=0.13.1"
+Pygments = ">=2.5.1"
+
+[package.extras]
+md = ["cmarkgfm (>=0.5.0,<0.7.0)"]
+
[[package]]
name = "redis"
version = "3.5.3"
@@ -637,12 +878,64 @@ hiredis = ["hiredis (>=0.1.3)"]
[[package]]
name = "regex"
-version = "2021.10.8"
+version = "2021.11.2"
description = "Alternative regular expression module, to replace re."
category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "requests"
+version = "2.26.0"
+description = "Python HTTP for Humans."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+
+[package.dependencies]
+certifi = ">=2017.4.17"
+charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
+idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
+urllib3 = ">=1.21.1,<1.27"
+
+[package.extras]
+socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
+
+[[package]]
+name = "requests-toolbelt"
+version = "0.9.1"
+description = "A utility belt for advanced users of python-requests"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+requests = ">=2.0.1,<3.0.0"
+
+[[package]]
+name = "rfc3986"
+version = "1.5.0"
+description = "Validating URI References per RFC 3986"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.extras]
+idna2008 = ["idna"]
+
+[[package]]
+name = "secretstorage"
+version = "3.3.1"
+description = "Python bindings to FreeDesktop.org Secret Service API"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+cryptography = ">=2.0"
+jeepney = ">=0.6"
+
[[package]]
name = "six"
version = "1.16.0"
@@ -653,11 +946,11 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "smmap"
-version = "4.0.0"
+version = "5.0.0"
description = "A pure Python implementation of a sliding window memory map manager"
category = "dev"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
[[package]]
name = "stevedore"
@@ -668,6 +961,7 @@ optional = false
python-versions = ">=3.6"
[package.dependencies]
+importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""}
pbr = ">=2.0.0,<2.1.0 || >2.1.0"
[[package]]
@@ -680,15 +974,65 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "tomli"
-version = "1.2.1"
+version = "1.2.2"
description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "tox"
+version = "3.24.4"
+description = "tox is a generic virtualenv management and test command line tool"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[package.dependencies]
+colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""}
+filelock = ">=3.0.0"
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+packaging = ">=14"
+pluggy = ">=0.12.0"
+py = ">=1.4.17"
+six = ">=1.14.0"
+toml = ">=0.9.4"
+virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7"
+
+[package.extras]
+docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"]
+testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)", "pathlib2 (>=2.3.3)"]
+
+[[package]]
+name = "tox-pyenv"
+version = "1.1.0"
+description = "tox plugin that makes tox use `pyenv which` to find python executables"
+category = "dev"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+tox = ">=2.0"
+
+[[package]]
+name = "tqdm"
+version = "4.62.3"
+description = "Fast, Extensible Progress Meter"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+dev = ["py-make (>=0.1.0)", "twine", "wheel"]
+notebook = ["ipywidgets (>=6)"]
+telegram = ["requests"]
+
[[package]]
name = "traitlets"
-version = "5.1.0"
+version = "5.1.1"
description = "Traitlets Python configuration system"
category = "dev"
optional = false
@@ -697,6 +1041,33 @@ python-versions = ">=3.7"
[package.extras]
test = ["pytest"]
+[[package]]
+name = "twine"
+version = "3.5.0"
+description = "Collection of utilities for publishing packages on PyPI"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+colorama = ">=0.4.3"
+importlib-metadata = ">=3.6"
+keyring = ">=15.1"
+pkginfo = ">=1.4.2"
+readme-renderer = ">=21.0"
+requests = ">=2.20"
+requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0"
+rfc3986 = ">=1.4.0"
+tqdm = ">=4.14"
+
+[[package]]
+name = "typed-ast"
+version = "1.4.3"
+description = "a fork of Python 2 and 3 ast modules with type comment support"
+category = "dev"
+optional = false
+python-versions = "*"
+
[[package]]
name = "types-redis"
version = "3.5.15"
@@ -729,6 +1100,39 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+[[package]]
+name = "urllib3"
+version = "1.26.7"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "dev"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+
+[package.extras]
+brotli = ["brotlipy (>=0.6.0)"]
+secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
+socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+
+[[package]]
+name = "virtualenv"
+version = "20.10.0"
+description = "Virtual Python Environment builder"
+category = "dev"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+
+[package.dependencies]
+"backports.entry-points-selectable" = ">=1.0.4"
+distlib = ">=0.3.1,<1"
+filelock = ">=3.2,<4"
+importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
+platformdirs = ">=2,<3"
+six = ">=1.9.0,<2"
+
+[package.extras]
+docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
+testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
+
[[package]]
name = "wcwidth"
version = "0.2.5"
@@ -737,18 +1141,38 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+description = "Character encoding aliases for legacy web content"
+category = "dev"
+optional = false
+python-versions = "*"
+
[[package]]
name = "wrapt"
-version = "1.13.2"
+version = "1.13.3"
description = "Module for decorators, wrappers and monkey patching."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+[[package]]
+name = "zipp"
+version = "3.6.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
+testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
+
[metadata]
lock-version = "1.1"
-python-versions = "^3.8"
-content-hash = "d2d83b8cd3b094879e1aeb058d0036203942143f12fafa8be03fb0c79460028f"
+python-versions = "^3.7"
+content-hash = "0bf1af9dc29db57d3ed3ca682fd1b497e34812815b43260d8b1a9dfdb9447c8e"
[metadata.files]
aioredis = [
@@ -760,12 +1184,12 @@ appnope = [
{file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"},
]
astroid = [
- {file = "astroid-2.8.3-py3-none-any.whl", hash = "sha256:f9d66e3a4a0e5b52819b2ff41ac2b179df9d180697db71c92beb33a60c661794"},
- {file = "astroid-2.8.3.tar.gz", hash = "sha256:0e361da0744d5011d4f5d57e64473ba9b7ab4da1e2d45d6631ebd67dd28c3cce"},
+ {file = "astroid-2.8.4-py3-none-any.whl", hash = "sha256:0755c998e7117078dcb7d0bda621391dd2a85da48052d948c7411ab187325346"},
+ {file = "astroid-2.8.4.tar.gz", hash = "sha256:1e83a69fd51b013ebf5912d26b9338d6643a55fec2f20c787792680610eed4a2"},
]
async-timeout = [
- {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"},
- {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"},
+ {file = "async-timeout-4.0.0.tar.gz", hash = "sha256:7d87a4e8adba8ededb52e579ce6bc8276985888913620c935094c2276fd83382"},
+ {file = "async_timeout-4.0.0-py3-none-any.whl", hash = "sha256:f3303dddf6cafa748a92747ab6c2ecf60e0aeca769aee4c151adfce243a05d9b"},
]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
@@ -779,13 +1203,81 @@ backcall = [
{file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"},
{file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
]
+"backports.entry-points-selectable" = [
+ {file = "backports.entry_points_selectable-1.1.1-py2.py3-none-any.whl", hash = "sha256:7fceed9532a7aa2bd888654a7314f864a3c16a4e710b34a58cfc0f08114c663b"},
+ {file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"},
+]
bandit = [
{file = "bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"},
{file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"},
]
black = [
- {file = "black-21.9b0-py3-none-any.whl", hash = "sha256:380f1b5da05e5a1429225676655dddb96f5ae8c75bdf91e53d798871b902a115"},
- {file = "black-21.9b0.tar.gz", hash = "sha256:7de4cfc7eb6b710de325712d40125689101d21d25283eed7e9998722cf10eb91"},
+ {file = "black-21.10b0-py3-none-any.whl", hash = "sha256:6eb7448da9143ee65b856a5f3676b7dda98ad9abe0f87fce8c59291f15e82a5b"},
+ {file = "black-21.10b0.tar.gz", hash = "sha256:a9952229092e325fe5f3dae56d81f639b23f7131eb840781947e4b2886030f33"},
+]
+bleach = [
+ {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"},
+ {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"},
+]
+certifi = [
+ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
+ {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
+]
+cffi = [
+ {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
+ {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"},
+ {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"},
+ {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"},
+ {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"},
+ {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"},
+ {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"},
+ {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"},
+ {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"},
+ {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"},
+ {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"},
+ {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"},
+ {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"},
+ {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"},
+ {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"},
+ {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"},
+ {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"},
+ {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"},
+ {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"},
+ {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"},
+ {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"},
+ {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"},
+ {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"},
+ {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"},
+ {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"},
+ {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"},
+ {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"},
+ {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"},
+ {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"},
+ {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"},
+ {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"},
+ {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"},
+ {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"},
+ {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"},
+ {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"},
+ {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"},
+ {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"},
+ {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"},
+ {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"},
+ {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"},
+ {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"},
+ {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"},
+ {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"},
+ {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"},
+ {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"},
+ {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"},
+ {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"},
+ {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"},
+ {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"},
+ {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"},
+]
+charset-normalizer = [
+ {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
+ {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"},
]
click = [
{file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"},
@@ -796,60 +1288,126 @@ colorama = [
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
coverage = [
- {file = "coverage-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1549e1d08ce38259de2bc3e9a0d5f3642ff4a8f500ffc1b2df73fd621a6cdfc0"},
- {file = "coverage-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcae10fccb27ca2a5f456bf64d84110a5a74144be3136a5e598f9d9fb48c0caa"},
- {file = "coverage-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:53a294dc53cfb39c74758edaa6305193fb4258a30b1f6af24b360a6c8bd0ffa7"},
- {file = "coverage-6.0.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8251b37be1f2cd9c0e5ccd9ae0380909c24d2a5ed2162a41fcdbafaf59a85ebd"},
- {file = "coverage-6.0.2-cp310-cp310-win32.whl", hash = "sha256:db42baa892cba723326284490283a68d4de516bfb5aaba369b4e3b2787a778b7"},
- {file = "coverage-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:bbffde2a68398682623d9dd8c0ca3f46fda074709b26fcf08ae7a4c431a6ab2d"},
- {file = "coverage-6.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:60e51a3dd55540bec686d7fff61b05048ca31e804c1f32cbb44533e6372d9cc3"},
- {file = "coverage-6.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a6a9409223a27d5ef3cca57dd7cd4dfcb64aadf2fad5c3b787830ac9223e01a"},
- {file = "coverage-6.0.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4b34ae4f51bbfa5f96b758b55a163d502be3dcb24f505d0227858c2b3f94f5b9"},
- {file = "coverage-6.0.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3bbda1b550e70fa6ac40533d3f23acd4f4e9cb4e6e77251ce77fdf41b3309fb2"},
- {file = "coverage-6.0.2-cp36-cp36m-win32.whl", hash = "sha256:4e28d2a195c533b58fc94a12826f4431726d8eb029ac21d874345f943530c122"},
- {file = "coverage-6.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:a82d79586a0a4f5fd1cf153e647464ced402938fbccb3ffc358c7babd4da1dd9"},
- {file = "coverage-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3be1206dc09fb6298de3fce70593e27436862331a85daee36270b6d0e1c251c4"},
- {file = "coverage-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9cd3828bbe1a40070c11fe16a51df733fd2f0cb0d745fb83b7b5c1f05967df7"},
- {file = "coverage-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d036dc1ed8e1388e995833c62325df3f996675779541f682677efc6af71e96cc"},
- {file = "coverage-6.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:04560539c19ec26995ecfb3d9307ff154fbb9a172cb57e3b3cfc4ced673103d1"},
- {file = "coverage-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:e4fb7ced4d9dec77d6cf533acfbf8e1415fe799430366affb18d69ee8a3c6330"},
- {file = "coverage-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:77b1da5767ed2f44611bc9bc019bc93c03fa495728ec389759b6e9e5039ac6b1"},
- {file = "coverage-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:61b598cbdbaae22d9e34e3f675997194342f866bb1d781da5d0be54783dce1ff"},
- {file = "coverage-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36e9040a43d2017f2787b28d365a4bb33fcd792c7ff46a047a04094dc0e2a30d"},
- {file = "coverage-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9f1627e162e3864a596486774876415a7410021f4b67fd2d9efdf93ade681afc"},
- {file = "coverage-6.0.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e7a0b42db2a47ecb488cde14e0f6c7679a2c5a9f44814393b162ff6397fcdfbb"},
- {file = "coverage-6.0.2-cp38-cp38-win32.whl", hash = "sha256:a1b73c7c4d2a42b9d37dd43199c5711d91424ff3c6c22681bc132db4a4afec6f"},
- {file = "coverage-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:1db67c497688fd4ba85b373b37cc52c50d437fd7267520ecd77bddbd89ea22c9"},
- {file = "coverage-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f2f184bf38e74f152eed7f87e345b51f3ab0b703842f447c22efe35e59942c24"},
- {file = "coverage-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1cf1deb3d5544bd942356364a2fdc8959bad2b6cf6eb17f47d301ea34ae822"},
- {file = "coverage-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ad9b8c1206ae41d46ec7380b78ba735ebb77758a650643e841dd3894966c31d0"},
- {file = "coverage-6.0.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:381d773d896cc7f8ba4ff3b92dee4ed740fb88dfe33b6e42efc5e8ab6dfa1cfe"},
- {file = "coverage-6.0.2-cp39-cp39-win32.whl", hash = "sha256:424c44f65e8be58b54e2b0bd1515e434b940679624b1b72726147cfc6a9fc7ce"},
- {file = "coverage-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:abbff240f77347d17306d3201e14431519bf64495648ca5a49571f988f88dee9"},
- {file = "coverage-6.0.2-pp36-none-any.whl", hash = "sha256:7092eab374346121805fb637572483270324407bf150c30a3b161fc0c4ca5164"},
- {file = "coverage-6.0.2-pp37-none-any.whl", hash = "sha256:30922626ce6f7a5a30bdba984ad21021529d3d05a68b4f71ea3b16bda35b8895"},
- {file = "coverage-6.0.2.tar.gz", hash = "sha256:6807947a09510dc31fa86f43595bf3a14017cd60bf633cc746d52141bfa6b149"},
+ {file = "coverage-6.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:42a1fb5dee3355df90b635906bb99126faa7936d87dfc97eacc5293397618cb7"},
+ {file = "coverage-6.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a00284dbfb53b42e35c7dd99fc0e26ef89b4a34efff68078ed29d03ccb28402a"},
+ {file = "coverage-6.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:51a441011a30d693e71dea198b2a6f53ba029afc39f8e2aeb5b77245c1b282ef"},
+ {file = "coverage-6.1.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e76f017b6d4140a038c5ff12be1581183d7874e41f1c0af58ecf07748d36a336"},
+ {file = "coverage-6.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7833c872718dc913f18e51ee97ea0dece61d9930893a58b20b3daf09bb1af6b6"},
+ {file = "coverage-6.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8186b5a4730c896cbe1e4b645bdc524e62d874351ae50e1db7c3e9f5dc81dc26"},
+ {file = "coverage-6.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbca34dca5a2d60f81326d908d77313816fad23d11b6069031a3d6b8c97a54f9"},
+ {file = "coverage-6.1.1-cp310-cp310-win32.whl", hash = "sha256:72bf437d54186d104388cbae73c9f2b0f8a3e11b6e8d7deb593bd14625c96026"},
+ {file = "coverage-6.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:994ce5a7b3d20981b81d83618aa4882f955bfa573efdbef033d5632b58597ba9"},
+ {file = "coverage-6.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ab6a0fe4c96f8058d41948ddf134420d3ef8c42d5508b5a341a440cce7a37a1d"},
+ {file = "coverage-6.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10ab138b153e4cc408b43792cb7f518f9ee02f4ff55cd1ab67ad6fd7e9905c7e"},
+ {file = "coverage-6.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7e083d32965d2eb6638a77e65b622be32a094fdc0250f28ce6039b0732fbcaa8"},
+ {file = "coverage-6.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:359a32515e94e398a5c0fa057e5887a42e647a9502d8e41165cf5cb8d3d1ca67"},
+ {file = "coverage-6.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:bf656cd74ff7b4ed7006cdb2a6728150aaad69c7242b42a2a532f77b63ea233f"},
+ {file = "coverage-6.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:dc5023be1c2a8b0a0ab5e31389e62c28b2453eb31dd069f4b8d1a0f9814d951a"},
+ {file = "coverage-6.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:557594a50bfe3fb0b1b57460f6789affe8850ad19c1acf2d14a3e12b2757d489"},
+ {file = "coverage-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:9eb0a1923354e0fdd1c8a6f53f5db2e6180d670e2b587914bf2e79fa8acfd003"},
+ {file = "coverage-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:04a92a6cf9afd99f9979c61348ec79725a9f9342fb45e63c889e33c04610d97b"},
+ {file = "coverage-6.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:479228e1b798d3c246ac89b09897ee706c51b3e5f8f8d778067f38db73ccc717"},
+ {file = "coverage-6.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78287731e3601ea5ce9d6468c82d88a12ef8fe625d6b7bdec9b45d96c1ad6533"},
+ {file = "coverage-6.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c95257aa2ccf75d3d91d772060538d5fea7f625e48157f8ca44594f94d41cb33"},
+ {file = "coverage-6.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9ad5895938a894c368d49d8470fe9f519909e5ebc6b8f8ea5190bd0df6aa4271"},
+ {file = "coverage-6.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:326d944aad0189603733d646e8d4a7d952f7145684da973c463ec2eefe1387c2"},
+ {file = "coverage-6.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e7d5606b9240ed4def9cbdf35be4308047d11e858b9c88a6c26974758d6225ce"},
+ {file = "coverage-6.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:572f917267f363101eec375c109c9c1118037c7cc98041440b5eabda3185ac7b"},
+ {file = "coverage-6.1.1-cp37-cp37m-win32.whl", hash = "sha256:35cd2230e1ed76df7d0081a997f0fe705be1f7d8696264eb508076e0d0b5a685"},
+ {file = "coverage-6.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:65ad3ff837c89a229d626b8004f0ee32110f9bfdb6a88b76a80df36ccc60d926"},
+ {file = "coverage-6.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:977ce557d79577a3dd510844904d5d968bfef9489f512be65e2882e1c6eed7d8"},
+ {file = "coverage-6.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62512c0ec5d307f56d86504c58eace11c1bc2afcdf44e3ff20de8ca427ca1d0e"},
+ {file = "coverage-6.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2e5b9c17a56b8bf0c0a9477fcd30d357deb486e4e1b389ed154f608f18556c8a"},
+ {file = "coverage-6.1.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:666c6b32b69e56221ad1551d377f718ed00e6167c7a1b9257f780b105a101271"},
+ {file = "coverage-6.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fb2fa2f6506c03c48ca42e3fe5a692d7470d290c047ee6de7c0f3e5fa7639ac9"},
+ {file = "coverage-6.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f0f80e323a17af63eac6a9db0c9188c10f1fd815c3ab299727150cc0eb92c7a4"},
+ {file = "coverage-6.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:738e823a746841248b56f0f3bd6abf3b73af191d1fd65e4c723b9c456216f0ad"},
+ {file = "coverage-6.1.1-cp38-cp38-win32.whl", hash = "sha256:8605add58e6a960729aa40c0fd9a20a55909dd9b586d3e8104cc7f45869e4c6b"},
+ {file = "coverage-6.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:6e994003e719458420e14ffb43c08f4c14990e20d9e077cb5cad7a3e419bbb54"},
+ {file = "coverage-6.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e3c4f5211394cd0bf6874ac5d29684a495f9c374919833dcfff0bd6d37f96201"},
+ {file = "coverage-6.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e14bceb1f3ae8a14374be2b2d7bc12a59226872285f91d66d301e5f41705d4d6"},
+ {file = "coverage-6.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0147f7833c41927d84f5af9219d9b32f875c0689e5e74ac8ca3cb61e73a698f9"},
+ {file = "coverage-6.1.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b1d0a1bce919de0dd8da5cff4e616b2d9e6ebf3bd1410ff645318c3dd615010a"},
+ {file = "coverage-6.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae6de0e41f44794e68d23644636544ed8003ce24845f213b24de097cbf44997f"},
+ {file = "coverage-6.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2797ed7a7e883b9ab76e8e778bb4c859fc2037d6fd0644d8675e64d58d1653"},
+ {file = "coverage-6.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c40966b683d92869b72ea3c11fd6b99a091fd30e12652727eca117273fc97366"},
+ {file = "coverage-6.1.1-cp39-cp39-win32.whl", hash = "sha256:a11a2c019324fc111485e79d55907e7289e53d0031275a6c8daed30690bc50c0"},
+ {file = "coverage-6.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:4d8b453764b9b26b0dd2afb83086a7c3f9379134e340288d2a52f8a91592394b"},
+ {file = "coverage-6.1.1-pp36-none-any.whl", hash = "sha256:3b270c6b48d3ff5a35deb3648028ba2643ad8434b07836782b1139cf9c66313f"},
+ {file = "coverage-6.1.1-pp37-none-any.whl", hash = "sha256:ffa8fee2b1b9e60b531c4c27cf528d6b5d5da46b1730db1f4d6eee56ff282e07"},
+ {file = "coverage-6.1.1-pp38-none-any.whl", hash = "sha256:4cd919057636f63ab299ccb86ea0e78b87812400c76abab245ca385f17d19fb5"},
+ {file = "coverage-6.1.1.tar.gz", hash = "sha256:b8e4f15b672c9156c1154249a9c5746e86ac9ae9edc3799ee3afebc323d9d9e0"},
+]
+cryptography = [
+ {file = "cryptography-35.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9"},
+ {file = "cryptography-35.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992"},
+ {file = "cryptography-35.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6"},
+ {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d"},
+ {file = "cryptography-35.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6"},
+ {file = "cryptography-35.0.0-cp36-abi3-win32.whl", hash = "sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8"},
+ {file = "cryptography-35.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588"},
+ {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953"},
+ {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6"},
+ {file = "cryptography-35.0.0-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2"},
+ {file = "cryptography-35.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c"},
+ {file = "cryptography-35.0.0.tar.gz", hash = "sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d"},
]
decorator = [
{file = "decorator-5.1.0-py3-none-any.whl", hash = "sha256:7b12e7c3c6ab203a29e157335e9122cb03de9ab7264b137594103fd4a683b374"},
{file = "decorator-5.1.0.tar.gz", hash = "sha256:e59913af105b9860aa2c8d3272d9de5a56a4e608db9a2f167a8480b323d529a7"},
]
+distlib = [
+ {file = "distlib-0.3.3-py2.py3-none-any.whl", hash = "sha256:c8b54e8454e5bf6237cc84c20e8264c3e991e824ef27e8f1e81049867d861e31"},
+ {file = "distlib-0.3.3.zip", hash = "sha256:d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05"},
+]
+dnspython = [
+ {file = "dnspython-2.1.0-py3-none-any.whl", hash = "sha256:95d12f6ef0317118d2a1a6fc49aac65ffec7eb8087474158f42f26a639135216"},
+ {file = "dnspython-2.1.0.zip", hash = "sha256:e4a87f0b573201a0f3727fa18a516b055fd1107e0e5477cded4a2de497df1dd4"},
+]
+docutils = [
+ {file = "docutils-0.18-py2.py3-none-any.whl", hash = "sha256:a31688b2ea858517fa54293e5d5df06fbb875fb1f7e4c64529271b77781ca8fc"},
+ {file = "docutils-0.18.tar.gz", hash = "sha256:c1d5dab2b11d16397406a282e53953fe495a46d69ae329f55aa98a5c4e3c5fbb"},
+]
+email-validator = [
+ {file = "email_validator-1.1.3-py2.py3-none-any.whl", hash = "sha256:5675c8ceb7106a37e40e2698a57c056756bf3f272cfa8682a4f87ebd95d8440b"},
+ {file = "email_validator-1.1.3.tar.gz", hash = "sha256:aa237a65f6f4da067119b7df3f13e89c25c051327b2b5b66dc075f33d62480d7"},
+]
execnet = [
{file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"},
{file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"},
]
+filelock = [
+ {file = "filelock-3.3.2-py3-none-any.whl", hash = "sha256:bb2a1c717df74c48a2d00ed625e5a66f8572a3a30baacb7657add1d7bac4097b"},
+ {file = "filelock-3.3.2.tar.gz", hash = "sha256:7afc856f74fa7006a289fd10fa840e1eebd8bbff6bffb69c26c54a0512ea8cf8"},
+]
flake8 = [
{file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"},
{file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
]
gitdb = [
- {file = "gitdb-4.0.7-py3-none-any.whl", hash = "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0"},
- {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"},
+ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
+ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
]
gitpython = [
{file = "GitPython-3.1.24-py3-none-any.whl", hash = "sha256:dc0a7f2f697657acc8d7f89033e8b1ea94dd90356b2983bca89dc8d2ab3cc647"},
{file = "GitPython-3.1.24.tar.gz", hash = "sha256:df83fdf5e684fef7c6ee2c02fc68a5ceb7e7e759d08b694088d0cacb4eba59e5"},
]
+idna = [
+ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
+ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
+]
+importlib-metadata = [
+ {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"},
+ {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"},
+]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
@@ -858,17 +1416,25 @@ ipdb = [
{file = "ipdb-0.13.9.tar.gz", hash = "sha256:951bd9a64731c444fd907a5ce268543020086a697f6be08f7cc2c9a752a278c5"},
]
ipython = [
- {file = "ipython-7.28.0-py3-none-any.whl", hash = "sha256:f16148f9163e1e526f1008d7c8d966d9c15600ca20d1a754287cf96d00ba6f1d"},
- {file = "ipython-7.28.0.tar.gz", hash = "sha256:2097be5c814d1b974aea57673176a924c4c8c9583890e7a5f082f547b9975b11"},
+ {file = "ipython-7.29.0-py3-none-any.whl", hash = "sha256:a658beaf856ce46bc453366d5dc6b2ddc6c481efd3540cb28aa3943819caac9f"},
+ {file = "ipython-7.29.0.tar.gz", hash = "sha256:4f69d7423a5a1972f6347ff233e38bbf4df6a150ef20fbb00c635442ac3060aa"},
]
isort = [
- {file = "isort-5.9.3-py3-none-any.whl", hash = "sha256:e17d6e2b81095c9db0a03a8025a957f334d6ea30b26f9ec70805411e5c7c81f2"},
- {file = "isort-5.9.3.tar.gz", hash = "sha256:9c2ea1e62d871267b78307fe511c0838ba0da28698c5732d54e2790bf3ba9899"},
+ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
+ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
]
jedi = [
{file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"},
{file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"},
]
+jeepney = [
+ {file = "jeepney-0.7.1-py3-none-any.whl", hash = "sha256:1b5a0ea5c0e7b166b2f5895b91a08c14de8915afda4407fb5022a195224958ac"},
+ {file = "jeepney-0.7.1.tar.gz", hash = "sha256:fa9e232dfa0c498bd0b8a3a73b8d8a31978304dcef0515adc859d4e096f96f4f"},
+]
+keyring = [
+ {file = "keyring-23.2.1-py3-none-any.whl", hash = "sha256:bd2145a237ed70c8ce72978b497619ddfcae640b6dcf494402d5143e37755c6e"},
+ {file = "keyring-23.2.1.tar.gz", hash = "sha256:6334aee6073db2fb1f30892697b1730105b5e9a77ce7e61fca6b435225493efe"},
+]
lazy-object-proxy = [
{file = "lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"},
{file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"},
@@ -931,8 +1497,8 @@ mypy-extensions = [
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
- {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"},
- {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"},
+ {file = "packaging-21.2-py3-none-any.whl", hash = "sha256:14317396d1e8cdb122989b916fa2c7e9ca8e2be9e8060a6eff75b6b7b4d8a7e0"},
+ {file = "packaging-21.2.tar.gz", hash = "sha256:096d689d78ca690e4cd8a89568ba06d07ca097e3306a4381635073ca91479966"},
]
parso = [
{file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"},
@@ -943,8 +1509,8 @@ pathspec = [
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
]
pbr = [
- {file = "pbr-5.6.0-py2.py3-none-any.whl", hash = "sha256:c68c661ac5cc81058ac94247278eeda6d2e6aecb3e227b0387c30d277e7ef8d4"},
- {file = "pbr-5.6.0.tar.gz", hash = "sha256:42df03e7797b796625b1029c0400279c7c34fd7df24a7d7818a1abb5b38710dd"},
+ {file = "pbr-5.7.0-py2.py3-none-any.whl", hash = "sha256:60002958e459b195e8dbe61bf22bcf344eedf1b4e03a321a5414feb15566100c"},
+ {file = "pbr-5.7.0.tar.gz", hash = "sha256:4651ca1445e80f2781827305de3d76b3ce53195f2227762684eb08f17bc473b7"},
]
pexpect = [
{file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
@@ -954,6 +1520,10 @@ pickleshare = [
{file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"},
{file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
]
+pkginfo = [
+ {file = "pkginfo-1.7.1-py2.py3-none-any.whl", hash = "sha256:37ecd857b47e5f55949c41ed061eb51a0bee97a87c969219d144c0e023982779"},
+ {file = "pkginfo-1.7.1.tar.gz", hash = "sha256:e7432f81d08adec7297633191bbf0bd47faf13cd8724c3a13250e51d542635bd"},
+]
platformdirs = [
{file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"},
{file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"},
@@ -966,21 +1536,25 @@ pptree = [
{file = "pptree-3.1.tar.gz", hash = "sha256:4dd0ba2f58000cbd29d68a5b64bac29bcb5a663642f79404877c0059668a69f6"},
]
prompt-toolkit = [
- {file = "prompt_toolkit-3.0.20-py3-none-any.whl", hash = "sha256:6076e46efae19b1e0ca1ec003ed37a933dc94b4d20f486235d436e64771dcd5c"},
- {file = "prompt_toolkit-3.0.20.tar.gz", hash = "sha256:eb71d5a6b72ce6db177af4a7d4d7085b99756bf656d98ffcc4fecd36850eea6c"},
+ {file = "prompt_toolkit-3.0.22-py3-none-any.whl", hash = "sha256:48d85cdca8b6c4f16480c7ce03fd193666b62b0a21667ca56b4bb5ad679d1170"},
+ {file = "prompt_toolkit-3.0.22.tar.gz", hash = "sha256:449f333dd120bd01f5d296a8ce1452114ba3a71fae7288d2f0ae2c918764fa72"},
]
ptyprocess = [
{file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
{file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
]
py = [
- {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"},
- {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"},
+ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
+ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pycodestyle = [
{file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
{file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
]
+pycparser = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
pydantic = [
{file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"},
{file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"},
@@ -1049,6 +1623,10 @@ python-ulid = [
{file = "python-ulid-1.0.3.tar.gz", hash = "sha256:5dd8b969312a40e2212cec9c1ad63f25d4b6eafd92ee3195883e0287b6e9d19e"},
{file = "python_ulid-1.0.3-py3-none-any.whl", hash = "sha256:8704dc20f547f531fe3a41d4369842d737a0f275403b909d0872e7ea0fe8d6f2"},
]
+pywin32-ctypes = [
+ {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"},
+ {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"},
+]
pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
@@ -1084,66 +1662,88 @@ pyyaml = [
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
+readme-renderer = [
+ {file = "readme_renderer-30.0-py2.py3-none-any.whl", hash = "sha256:3286806450d9961d6e3b5f8a59f77e61503799aca5155c8d8d40359b4e1e1adc"},
+ {file = "readme_renderer-30.0.tar.gz", hash = "sha256:8299700d7a910c304072a7601eafada6712a5b011a20139417e1b1e9f04645d8"},
+]
redis = [
{file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"},
{file = "redis-3.5.3.tar.gz", hash = "sha256:0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2"},
]
regex = [
- {file = "regex-2021.10.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:094a905e87a4171508c2a0e10217795f83c636ccc05ddf86e7272c26e14056ae"},
- {file = "regex-2021.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:981c786293a3115bc14c103086ae54e5ee50ca57f4c02ce7cf1b60318d1e8072"},
- {file = "regex-2021.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b0f2f874c6a157c91708ac352470cb3bef8e8814f5325e3c5c7a0533064c6a24"},
- {file = "regex-2021.10.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51feefd58ac38eb91a21921b047da8644155e5678e9066af7bcb30ee0dca7361"},
- {file = "regex-2021.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8de658d7db5987b11097445f2b1f134400e2232cb40e614e5f7b6f5428710e"},
- {file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1ce02f420a7ec3b2480fe6746d756530f69769292eca363218c2291d0b116a01"},
- {file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39079ebf54156be6e6902f5c70c078f453350616cfe7bfd2dd15bdb3eac20ccc"},
- {file = "regex-2021.10.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ff24897f6b2001c38a805d53b6ae72267025878d35ea225aa24675fbff2dba7f"},
- {file = "regex-2021.10.8-cp310-cp310-win32.whl", hash = "sha256:c6569ba7b948c3d61d27f04e2b08ebee24fec9ff8e9ea154d8d1e975b175bfa7"},
- {file = "regex-2021.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:45cb0f7ff782ef51bc79e227a87e4e8f24bc68192f8de4f18aae60b1d60bc152"},
- {file = "regex-2021.10.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fab3ab8aedfb443abb36729410403f0fe7f60ad860c19a979d47fb3eb98ef820"},
- {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e55f8d66f1b41d44bc44c891bcf2c7fad252f8f323ee86fba99d71fd1ad5e3"},
- {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d52c5e089edbdb6083391faffbe70329b804652a53c2fdca3533e99ab0580d9"},
- {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1abbd95cbe9e2467cac65c77b6abd9223df717c7ae91a628502de67c73bf6838"},
- {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9b5c215f3870aa9b011c00daeb7be7e1ae4ecd628e9beb6d7e6107e07d81287"},
- {file = "regex-2021.10.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f540f153c4f5617bc4ba6433534f8916d96366a08797cbbe4132c37b70403e92"},
- {file = "regex-2021.10.8-cp36-cp36m-win32.whl", hash = "sha256:1f51926db492440e66c89cd2be042f2396cf91e5b05383acd7372b8cb7da373f"},
- {file = "regex-2021.10.8-cp36-cp36m-win_amd64.whl", hash = "sha256:5f55c4804797ef7381518e683249310f7f9646da271b71cb6b3552416c7894ee"},
- {file = "regex-2021.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb2baff66b7d2267e07ef71e17d01283b55b3cc51a81b54cc385e721ae172ba4"},
- {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e527ab1c4c7cf2643d93406c04e1d289a9d12966529381ce8163c4d2abe4faf"},
- {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c98b013273e9da5790ff6002ab326e3f81072b4616fd95f06c8fa733d2745f"},
- {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:55ef044899706c10bc0aa052f2fc2e58551e2510694d6aae13f37c50f3f6ff61"},
- {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0ab3530a279a3b7f50f852f1bab41bc304f098350b03e30a3876b7dd89840e"},
- {file = "regex-2021.10.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a37305eb3199d8f0d8125ec2fb143ba94ff6d6d92554c4b8d4a8435795a6eccd"},
- {file = "regex-2021.10.8-cp37-cp37m-win32.whl", hash = "sha256:2efd47704bbb016136fe34dfb74c805b1ef5c7313aef3ce6dcb5ff844299f432"},
- {file = "regex-2021.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:924079d5590979c0e961681507eb1773a142553564ccae18d36f1de7324e71ca"},
- {file = "regex-2021.10.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:19b8f6d23b2dc93e8e1e7e288d3010e58fafed323474cf7f27ab9451635136d9"},
- {file = "regex-2021.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b09d3904bf312d11308d9a2867427479d277365b1617e48ad09696fa7dfcdf59"},
- {file = "regex-2021.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:951be934dc25d8779d92b530e922de44dda3c82a509cdb5d619f3a0b1491fafa"},
- {file = "regex-2021.10.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f125fce0a0ae4fd5c3388d369d7a7d78f185f904c90dd235f7ecf8fe13fa741"},
- {file = "regex-2021.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f199419a81c1016e0560c39773c12f0bd924c37715bffc64b97140d2c314354"},
- {file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:09e1031e2059abd91177c302da392a7b6859ceda038be9e015b522a182c89e4f"},
- {file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c070d5895ac6aeb665bd3cd79f673775caf8d33a0b569e98ac434617ecea57d"},
- {file = "regex-2021.10.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:176796cb7f82a7098b0c436d6daac82f57b9101bb17b8e8119c36eecf06a60a3"},
- {file = "regex-2021.10.8-cp38-cp38-win32.whl", hash = "sha256:5e5796d2f36d3c48875514c5cd9e4325a1ca172fc6c78b469faa8ddd3d770593"},
- {file = "regex-2021.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:e4204708fa116dd03436a337e8e84261bc8051d058221ec63535c9403a1582a1"},
- {file = "regex-2021.10.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6dcf53d35850ce938b4f044a43b33015ebde292840cef3af2c8eb4c860730fff"},
- {file = "regex-2021.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8b6ee6555b6fbae578f1468b3f685cdfe7940a65675611365a7ea1f8d724991"},
- {file = "regex-2021.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2ec1c106d3f754444abf63b31e5c4f9b5d272272a491fa4320475aba9e8157c"},
- {file = "regex-2021.10.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973499dac63625a5ef9dfa4c791aa33a502ddb7615d992bdc89cf2cc2285daa3"},
- {file = "regex-2021.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88dc3c1acd3f0ecfde5f95c32fcb9beda709dbdf5012acdcf66acbc4794468eb"},
- {file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4786dae85c1f0624ac77cb3813ed99267c9adb72e59fdc7297e1cf4d6036d493"},
- {file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe6ce4f3d3c48f9f402da1ceb571548133d3322003ce01b20d960a82251695d2"},
- {file = "regex-2021.10.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9e3e2cea8f1993f476a6833ef157f5d9e8c75a59a8d8b0395a9a6887a097243b"},
- {file = "regex-2021.10.8-cp39-cp39-win32.whl", hash = "sha256:82cfb97a36b1a53de32b642482c6c46b6ce80803854445e19bc49993655ebf3b"},
- {file = "regex-2021.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:b04e512eb628ea82ed86eb31c0f7fc6842b46bf2601b66b1356a7008327f7700"},
- {file = "regex-2021.10.8.tar.gz", hash = "sha256:26895d7c9bbda5c52b3635ce5991caa90fbb1ddfac9c9ff1c7ce505e2282fb2a"},
+ {file = "regex-2021.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:897c539f0f3b2c3a715be651322bef2167de1cdc276b3f370ae81a3bda62df71"},
+ {file = "regex-2021.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:886f459db10c0f9d17c87d6594e77be915f18d343ee138e68d259eb385f044a8"},
+ {file = "regex-2021.11.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:075b0fdbaea81afcac5a39a0d1bb91de887dd0d93bf692a5dd69c430e7fc58cb"},
+ {file = "regex-2021.11.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6238d30dcff141de076344cf7f52468de61729c2f70d776fce12f55fe8df790"},
+ {file = "regex-2021.11.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fab29411d75c2eb48070020a40f80255936d7c31357b086e5931c107d48306e"},
+ {file = "regex-2021.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0148988af0182a0a4e5020e7c168014f2c55a16d11179610f7883dd48ac0ebe"},
+ {file = "regex-2021.11.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be30cd315db0168063a1755fa20a31119da91afa51da2907553493516e165640"},
+ {file = "regex-2021.11.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e9cec3a62d146e8e122d159ab93ac32c988e2ec0dcb1e18e9e53ff2da4fbd30c"},
+ {file = "regex-2021.11.2-cp310-cp310-win32.whl", hash = "sha256:41c66bd6750237a8ed23028a6c9173dc0c92dc24c473e771d3bfb9ee817700c3"},
+ {file = "regex-2021.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:0075fe4e2c2720a685fef0f863edd67740ff78c342cf20b2a79bc19388edf5db"},
+ {file = "regex-2021.11.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0ed3465acf8c7c10aa2e0f3d9671da410ead63b38a77283ef464cbb64275df58"},
+ {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab1fea8832976ad0bebb11f652b692c328043057d35e9ebc78ab0a7a30cf9a70"},
+ {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb1e44d860345ab5d4f533b6c37565a22f403277f44c4d2d5e06c325da959883"},
+ {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9486ebda015913909bc28763c6b92fcc3b5e5a67dee4674bceed112109f5dfb8"},
+ {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20605bfad484e1341b2cbfea0708e4b211d233716604846baa54b94821f487cb"},
+ {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f20f9f430c33597887ba9bd76635476928e76cad2981643ca8be277b8e97aa96"},
+ {file = "regex-2021.11.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1d85ca137756d62c8138c971453cafe64741adad1f6a7e63a22a5a8abdbd19fa"},
+ {file = "regex-2021.11.2-cp36-cp36m-win32.whl", hash = "sha256:af23b9ca9a874ef0ec20e44467b8edd556c37b0f46f93abfa93752ea7c0e8d1e"},
+ {file = "regex-2021.11.2-cp36-cp36m-win_amd64.whl", hash = "sha256:070336382ca92c16c45b4066c4ba9fa83fb0bd13d5553a82e07d344df8d58a84"},
+ {file = "regex-2021.11.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ef4e53e2fdc997d91f5b682f81f7dc9661db9a437acce28745d765d251902d85"},
+ {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35ed5714467fc606551db26f80ee5d6aa1f01185586a7bccd96f179c4b974a11"},
+ {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee36d5113b6506b97f45f2e8447cb9af146e60e3f527d93013d19f6d0405f3b"},
+ {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4fba661a4966adbd2c3c08d3caad6822ecb6878f5456588e2475ae23a6e47929"},
+ {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77f9d16f7970791f17ecce7e7f101548314ed1ee2583d4268601f30af3170856"},
+ {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6a28e87ba69f3a4f30d775b179aac55be1ce59f55799328a0d9b6df8f16b39d"},
+ {file = "regex-2021.11.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9267e4fba27e6dd1008c4f2983cc548c98b4be4444e3e342db11296c0f45512f"},
+ {file = "regex-2021.11.2-cp37-cp37m-win32.whl", hash = "sha256:d4bfe3bc3976ccaeb4ae32f51e631964e2f0e85b2b752721b7a02de5ce3b7f27"},
+ {file = "regex-2021.11.2-cp37-cp37m-win_amd64.whl", hash = "sha256:2bb7cae741de1aa03e3dd3a7d98c304871eb155921ca1f0d7cc11f5aade913fd"},
+ {file = "regex-2021.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:23f93e74409c210de4de270d4bf88fb8ab736a7400f74210df63a93728cf70d6"},
+ {file = "regex-2021.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8ee91e1c295beb5c132ebd78616814de26fedba6aa8687ea460c7f5eb289b72"},
+ {file = "regex-2021.11.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e3ff69ab203b54ce5c480c3ccbe959394ea5beef6bd5ad1785457df7acea92e"},
+ {file = "regex-2021.11.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3c00cb5c71da655e1e5161481455479b613d500dd1bd252aa01df4f037c641f"},
+ {file = "regex-2021.11.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf35e16f4b639daaf05a2602c1b1d47370e01babf9821306aa138924e3fe92"},
+ {file = "regex-2021.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb11c982a849dc22782210b01d0c1b98eb3696ce655d58a54180774e4880ac66"},
+ {file = "regex-2021.11.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e3755e0f070bc31567dfe447a02011bfa8444239b3e9e5cca6773a22133839"},
+ {file = "regex-2021.11.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0621c90f28d17260b41838b22c81a79ff436141b322960eb49c7b3f91d1cbab6"},
+ {file = "regex-2021.11.2-cp38-cp38-win32.whl", hash = "sha256:8fbe1768feafd3d0156556677b8ff234c7bf94a8110e906b2d73506f577a3269"},
+ {file = "regex-2021.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:f9ee98d658a146cb6507be720a0ce1b44f2abef8fb43c2859791d91aace17cd5"},
+ {file = "regex-2021.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3794cea825f101fe0df9af8a00f9fad8e119c91e39a28636b95ee2b45b6c2e5"},
+ {file = "regex-2021.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3576e173e7b4f88f683b4de7db0c2af1b209bb48b2bf1c827a6f3564fad59a97"},
+ {file = "regex-2021.11.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b4f4810117a9072a5aa70f7fea5f86fa9efbe9a798312e0a05044bd707cc33"},
+ {file = "regex-2021.11.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f5930d334c2f607711d54761956aedf8137f83f1b764b9640be21d25a976f3a4"},
+ {file = "regex-2021.11.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:956187ff49db7014ceb31e88fcacf4cf63371e6e44d209cf8816cd4a2d61e11a"},
+ {file = "regex-2021.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17e095f7f96a4b9f24b93c2c915f31a5201a6316618d919b0593afb070a5270e"},
+ {file = "regex-2021.11.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a56735c35a3704603d9d7b243ee06139f0837bcac2171d9ba1d638ce1df0742a"},
+ {file = "regex-2021.11.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adf35d88d9cffc202e6046e4c32e1e11a1d0238b2fcf095c94f109e510ececea"},
+ {file = "regex-2021.11.2-cp39-cp39-win32.whl", hash = "sha256:30fe317332de0e50195665bc61a27d46e903d682f94042c36b3f88cb84bd7958"},
+ {file = "regex-2021.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:85289c25f658e3260b00178757c87f033f3d4b3e40aa4abdd4dc875ff11a94fb"},
+ {file = "regex-2021.11.2.tar.gz", hash = "sha256:5e85dcfc5d0f374955015ae12c08365b565c6f1eaf36dd182476a4d8e5a1cdb7"},
+]
+requests = [
+ {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"},
+ {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"},
+]
+requests-toolbelt = [
+ {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"},
+ {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"},
+]
+rfc3986 = [
+ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
+ {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
+]
+secretstorage = [
+ {file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"},
+ {file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
smmap = [
- {file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"},
- {file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"},
+ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"},
+ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
]
stevedore = [
{file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"},
@@ -1154,12 +1754,60 @@ toml = [
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tomli = [
- {file = "tomli-1.2.1-py3-none-any.whl", hash = "sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f"},
- {file = "tomli-1.2.1.tar.gz", hash = "sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442"},
+ {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"},
+ {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"},
+]
+tox = [
+ {file = "tox-3.24.4-py2.py3-none-any.whl", hash = "sha256:5e274227a53dc9ef856767c21867377ba395992549f02ce55eb549f9fb9a8d10"},
+ {file = "tox-3.24.4.tar.gz", hash = "sha256:c30b57fa2477f1fb7c36aa1d83292d5c2336cd0018119e1b1c17340e2c2708ca"},
+]
+tox-pyenv = [
+ {file = "tox-pyenv-1.1.0.tar.gz", hash = "sha256:916c2213577aec0b3b5452c5bfb32fd077f3a3196f50a81ad57d7ef3fc2599e4"},
+ {file = "tox_pyenv-1.1.0-py2.py3-none-any.whl", hash = "sha256:e470c18af115fe52eeff95e7e3cdd0793613eca19709966fc2724b79d55246cb"},
+]
+tqdm = [
+ {file = "tqdm-4.62.3-py2.py3-none-any.whl", hash = "sha256:8dd278a422499cd6b727e6ae4061c40b48fce8b76d1ccbf5d34fca9b7f925b0c"},
+ {file = "tqdm-4.62.3.tar.gz", hash = "sha256:d359de7217506c9851b7869f3708d8ee53ed70a1b8edbba4dbcb47442592920d"},
]
traitlets = [
- {file = "traitlets-5.1.0-py3-none-any.whl", hash = "sha256:03f172516916220b58c9f19d7f854734136dd9528103d04e9bf139a92c9f54c4"},
- {file = "traitlets-5.1.0.tar.gz", hash = "sha256:bd382d7ea181fbbcce157c133db9a829ce06edffe097bcf3ab945b435452b46d"},
+ {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"},
+ {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"},
+]
+twine = [
+ {file = "twine-3.5.0-py3-none-any.whl", hash = "sha256:3725b79a6f1cfe84a134544ae1894706e60719ab28547cb6c6de781b9f72706d"},
+ {file = "twine-3.5.0.tar.gz", hash = "sha256:218c42324121d4417cbcbbda59c623b8acc4becfce3daa545e6b6dd48bd21385"},
+]
+typed-ast = [
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"},
+ {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"},
+ {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"},
+ {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"},
+ {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"},
+ {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"},
+ {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"},
+ {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"},
+ {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"},
+ {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"},
+ {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"},
+ {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"},
+ {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"},
]
types-redis = [
{file = "types-redis-3.5.15.tar.gz", hash = "sha256:e52be0077ca1189d8cce813a20c2a70e9e577f34ab898371c6cbed696a88bdee"},
@@ -1178,53 +1826,76 @@ unasync = [
{file = "unasync-0.5.0-py3-none-any.whl", hash = "sha256:8d4536dae85e87b8751dfcc776f7656fd0baf54bb022a7889440dc1b9dc3becb"},
{file = "unasync-0.5.0.tar.gz", hash = "sha256:b675d87cf56da68bd065d3b7a67ac71df85591978d84c53083c20d79a7e5096d"},
]
+urllib3 = [
+ {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"},
+ {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"},
+]
+virtualenv = [
+ {file = "virtualenv-20.10.0-py2.py3-none-any.whl", hash = "sha256:4b02e52a624336eece99c96e3ab7111f469c24ba226a53ec474e8e787b365814"},
+ {file = "virtualenv-20.10.0.tar.gz", hash = "sha256:576d05b46eace16a9c348085f7d0dc8ef28713a2cabaa1cf0aea41e8f12c9218"},
+]
wcwidth = [
{file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
]
-wrapt = [
- {file = "wrapt-1.13.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3de7b4d3066cc610054e7aa2c005645e308df2f92be730aae3a47d42e910566a"},
- {file = "wrapt-1.13.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:8164069f775c698d15582bf6320a4f308c50d048c1c10cf7d7a341feaccf5df7"},
- {file = "wrapt-1.13.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9adee1891253670575028279de8365c3a02d3489a74a66d774c321472939a0b1"},
- {file = "wrapt-1.13.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a70d876c9aba12d3bd7f8f1b05b419322c6789beb717044eea2c8690d35cb91b"},
- {file = "wrapt-1.13.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3f87042623530bcffea038f824b63084180513c21e2e977291a9a7e65a66f13b"},
- {file = "wrapt-1.13.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:e634136f700a21e1fcead0c137f433dde928979538c14907640607d43537d468"},
- {file = "wrapt-1.13.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3e33c138d1e3620b1e0cc6fd21e46c266393ed5dae0d595b7ed5a6b73ed57aa0"},
- {file = "wrapt-1.13.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:283e402e5357e104ac1e3fba5791220648e9af6fb14ad7d9cc059091af2b31d2"},
- {file = "wrapt-1.13.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ccb34ce599cab7f36a4c90318697ead18312c67a9a76327b3f4f902af8f68ea1"},
- {file = "wrapt-1.13.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:fbad5ba74c46517e6488149514b2e2348d40df88cd6b52a83855b7a8bf04723f"},
- {file = "wrapt-1.13.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:724ed2bc9c91a2b9026e5adce310fa60c6e7c8760b03391445730b9789b9d108"},
- {file = "wrapt-1.13.2-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:83f2793ec6f3ef513ad8d5b9586f5ee6081cad132e6eae2ecb7eac1cc3decae0"},
- {file = "wrapt-1.13.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:0473d1558b93e314e84313cc611f6c86be779369f9d3734302bf185a4d2625b1"},
- {file = "wrapt-1.13.2-cp35-cp35m-win32.whl", hash = "sha256:15eee0e6fd07f48af2f66d0e6f2ff1916ffe9732d464d5e2390695296872cad9"},
- {file = "wrapt-1.13.2-cp35-cp35m-win_amd64.whl", hash = "sha256:bc85d17d90201afd88e3d25421da805e4e135012b5d1f149e4de2981394b2a52"},
- {file = "wrapt-1.13.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c6ee5f8734820c21b9b8bf705e99faba87f21566d20626568eeb0d62cbeaf23c"},
- {file = "wrapt-1.13.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:53c6706a1bcfb6436f1625511b95b812798a6d2ccc51359cd791e33722b5ea32"},
- {file = "wrapt-1.13.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fbe6aebc9559fed7ea27de51c2bf5c25ba2a4156cf0017556f72883f2496ee9a"},
- {file = "wrapt-1.13.2-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:0582180566e7a13030f896c2f1ac6a56134ab5f3c3f4c5538086f758b1caf3f2"},
- {file = "wrapt-1.13.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:bff0a59387a0a2951cb869251257b6553663329a1b5525b5226cab8c88dcbe7e"},
- {file = "wrapt-1.13.2-cp36-cp36m-win32.whl", hash = "sha256:df3eae297a5f1594d1feb790338120f717dac1fa7d6feed7b411f87e0f2401c7"},
- {file = "wrapt-1.13.2-cp36-cp36m-win_amd64.whl", hash = "sha256:1eb657ed84f4d3e6ad648483c8a80a0cf0a78922ef94caa87d327e2e1ad49b48"},
- {file = "wrapt-1.13.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0cdedf681db878416c05e1831ec69691b0e6577ac7dca9d4f815632e3549580"},
- {file = "wrapt-1.13.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:87ee3c73bdfb4367b26c57259995935501829f00c7b3eed373e2ad19ec21e4e4"},
- {file = "wrapt-1.13.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3e0d16eedc242d01a6f8cf0623e9cdc3b869329da3f97a15961d8864111d8cf0"},
- {file = "wrapt-1.13.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:8318088860968c07e741537030b1abdd8908ee2c71fbe4facdaade624a09e006"},
- {file = "wrapt-1.13.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d90520616fce71c05dedeac3a0fe9991605f0acacd276e5f821842e454485a70"},
- {file = "wrapt-1.13.2-cp37-cp37m-win32.whl", hash = "sha256:22142afab65daffc95863d78effcbd31c19a8003eca73de59f321ee77f73cadb"},
- {file = "wrapt-1.13.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d0d717e10f952df7ea41200c507cc7e24458f4c45b56c36ad418d2e79dacd1d4"},
- {file = "wrapt-1.13.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:593cb049ce1c391e0288523b30426c4430b26e74c7e6f6e2844bd99ac7ecc831"},
- {file = "wrapt-1.13.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8860c8011a6961a651b1b9f46fdbc589ab63b0a50d645f7d92659618a3655867"},
- {file = "wrapt-1.13.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ada5e29e59e2feb710589ca1c79fd989b1dd94d27079dc1d199ec954a6ecc724"},
- {file = "wrapt-1.13.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:fdede980273aeca591ad354608778365a3a310e0ecdd7a3587b38bc5be9b1808"},
- {file = "wrapt-1.13.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:af9480de8e63c5f959a092047aaf3d7077422ded84695b3398f5d49254af3e90"},
- {file = "wrapt-1.13.2-cp38-cp38-win32.whl", hash = "sha256:c65e623ea7556e39c4f0818200a046cbba7575a6b570ff36122c276fdd30ab0a"},
- {file = "wrapt-1.13.2-cp38-cp38-win_amd64.whl", hash = "sha256:b20703356cae1799080d0ad15085dc3213c1ac3f45e95afb9f12769b98231528"},
- {file = "wrapt-1.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c5c4cf188b5643a97e87e2110bbd4f5bc491d54a5b90633837b34d5df6a03fe"},
- {file = "wrapt-1.13.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:82223f72eba6f63eafca87a0f614495ae5aa0126fe54947e2b8c023969e9f2d7"},
- {file = "wrapt-1.13.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:81a4cf257263b299263472d669692785f9c647e7dca01c18286b8f116dbf6b38"},
- {file = "wrapt-1.13.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:728e2d9b7a99dd955d3426f237b940fc74017c4a39b125fec913f575619ddfe9"},
- {file = "wrapt-1.13.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7574de567dcd4858a2ffdf403088d6df8738b0e1eabea220553abf7c9048f59e"},
- {file = "wrapt-1.13.2-cp39-cp39-win32.whl", hash = "sha256:c7ac2c7a8e34bd06710605b21dd1f3576764443d68e069d2afba9b116014d072"},
- {file = "wrapt-1.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e6d1a8eeef415d7fb29fe017de0e48f45e45efd2d1bfda28fc50b7b330859ef"},
- {file = "wrapt-1.13.2.tar.gz", hash = "sha256:dca56cc5963a5fd7c2aa8607017753f534ee514e09103a6c55d2db70b50e7447"},
+webencodings = [
+ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
+ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
+]
+wrapt = [
+ {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"},
+ {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"},
+ {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"},
+ {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"},
+ {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"},
+ {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"},
+ {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"},
+ {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"},
+ {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"},
+ {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"},
+ {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"},
+ {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"},
+ {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"},
+ {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"},
+ {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"},
+ {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"},
+ {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"},
+ {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"},
+ {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"},
+ {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"},
+ {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"},
+ {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"},
+ {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"},
+ {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"},
+ {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"},
+ {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"},
+ {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"},
+ {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"},
+ {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"},
+ {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"},
+ {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"},
+ {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"},
+ {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"},
+ {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"},
+ {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"},
+ {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"},
+ {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"},
+ {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"},
+ {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"},
+ {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"},
+ {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"},
+ {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"},
+ {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"},
+ {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"},
+ {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"},
+ {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"},
+ {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"},
+ {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"},
+ {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"},
+ {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"},
+ {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"},
+]
+zipp = [
+ {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"},
+ {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"},
]
diff --git a/pyproject.toml b/pyproject.toml
index 56be6de..9cdbec6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,13 +1,28 @@
[tool.poetry]
name = "redis-om"
-version = "0.1.0"
+version = "0.0.11"
description = "A high-level library containing useful Redis abstractions and tools, like an ORM and leaderboard."
-authors = ["Andrew Brookins "]
-license = "MIT"
-build = "build.py"
+authors = ["Andrew Brookins "]
+maintainers = ["Andrew Brookins "]
+license = "BSD-3-Clause"
+readme = "README.md"
+repository = "https://github.com/redis-developer/redis-om-python"
+packages = [
+ { "include" = "aredis_om" },
+ { "include" = "redis_om" },
+]
+classifiers = [
+ "Development Status :: 3 - Alpha",
+ "Intended Audience :: Developers",
+ "Topic :: Database :: Front-Ends",
+]
+include=[
+ "docs/*",
+ "images/*",
+]
[tool.poetry.dependencies]
-python = "^3.8"
+python = "^3.7"
redis = "^3.5.3"
aioredis = "^2.0.0"
pydantic = "^1.8.2"
@@ -33,9 +48,13 @@ pytest-cov = "^3.0.0"
pytest-xdist = "^2.4.0"
unasync = "^0.5.0"
pytest-asyncio = "^0.16.0"
+twine = "^3.4.2"
+email-validator = "^1.1.3"
+tox = "^3.24.4"
+tox-pyenv = "^1.1.0"
[tool.poetry.scripts]
-migrate = "redis_om.orm.cli.migrate:migrate"
+migrate = "redis_om.model.cli.migrate:migrate"
[build-system]
requires = ["poetry-core>=1.0.0"]
diff --git a/redis_om/model/migrations/__init__.py b/redis_om/model/migrations/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/redis_om/model/models.py b/redis_om/model/models.py
deleted file mode 100644
index 81655e5..0000000
--- a/redis_om/model/models.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import abc
-from typing import Optional
-
-from redis_om.model.model import HashModel, JsonModel
-
-
-class BaseJsonModel(JsonModel, abc.ABC):
- class Meta:
- global_key_prefix = "redis-om"
-
-
-class BaseHashModel(HashModel, abc.ABC):
- class Meta:
- global_key_prefix = "redis-om"
-
-
-# class AddressJson(BaseJsonModel):
-# address_line_1: str
-# address_line_2: Optional[str]
-# city: str
-# country: str
-# postal_code: str
-#
-
-
-class AddressHash(BaseHashModel):
- address_line_1: str
- address_line_2: Optional[str]
- city: str
- country: str
- postal_code: str
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..8491fc6
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+from setuptools import setup
+
+packages = \
+['aredis_om',
+ 'aredis_om.model',
+ 'aredis_om.model.cli',
+ 'aredis_om.model.migrations']
+
+package_data = \
+{'': ['*']}
+
+install_requires = \
+['aioredis>=2.0.0,<3.0.0',
+ 'click>=8.0.1,<9.0.0',
+ 'pptree>=3.1,<4.0',
+ 'pydantic>=1.8.2,<2.0.0',
+ 'python-dotenv>=0.19.1,<0.20.0',
+ 'python-ulid>=1.0.3,<2.0.0',
+ 'redis>=3.5.3,<4.0.0',
+ 'six>=1.16.0,<2.0.0',
+ 'types-redis>=3.5.9,<4.0.0',
+ 'types-six>=1.16.1,<2.0.0']
+
+entry_points = \
+{'console_scripts': ['migrate = redis_om.model.cli.migrate:migrate']}
+
+setup_kwargs = {
+ 'name': 'redis-om',
+ 'version': '0.0.11',
+ 'description': 'A high-level library containing useful Redis abstractions and tools, like an ORM and leaderboard.',
+ 'long_description': '\n
\n
\n
\n
\n
\n
\n\n\n
\n Object mapping, and more, for Redis and Python\n
\n\n\n---\n\n[![Version][version-svg]][package-url]\n[![License][license-image]][license-url]\n[![Build Status][ci-svg]][ci-url]\n\n**Redis OM Python** makes it easy to model Redis data in your Python applications.\n\n**Redis OM Python** | [Redis OM Node.js][redis-om-js] | [Redis OM Spring][redis-om-spring] | [Redis OM .NET][redis-om-dotnet]\n\n\n Table of contents
\n\nspan\n\n\n\n- [π‘ Why Redis OM?](#-why-redis-om)\n- [π Modeling Your Data](#-modeling-your-data)\n- [β Validating Data With Your Model](#-validating-data-with-your-model)\n- [π Rich Queries and Embedded Models](#-rich-queries-and-embedded-models)\n- [π» Installation](#-installation)\n- [π Documentation](#-documentation)\n- [βοΈ Troubleshooting](#-troubleshooting)\n- [β¨ So, How Do You Get RediSearch and RedisJSON?](#-so-how-do-you-get-redisearch-and-redisjson)\n- [β€οΈ Contributing](#-contributing)\n- [π License](#-license)\n\n\n\n \n\n## π‘ Why Redis OM?\n\nRedis OM provides high-level abstractions that make it easy to model and query data in Redis with modern Python applications.\n\nThis **preview** release contains the following features:\n\n* Declarative object mapping for Redis objects\n* Declarative secondary-index generation\n* Fluent APIs for querying Redis\n\n## π Modeling Your Data\n\nRedis OM contains powerful declarative models that give you data validation, serialization, and persistence to Redis.\n\nCheck out this example of modeling customer data with Redis OM. 
First, we create a `Customer` model:\n\n```python\nimport datetime\nfrom typing import Optional\n\nfrom pydantic import EmailStr\n\nfrom aredis_om import HashModel\n\n\nclass Customer(HashModel):\n first_name: str\n last_name: str\n email: EmailStr\n join_date: datetime.date\n age: int\n bio: Optional[str]\n```\n\nNow that we have a `Customer` model, let\'s use it to save customer data to Redis.\n\n```python\nimport datetime\nfrom typing import Optional\n\nfrom pydantic import EmailStr\n\nfrom aredis_om import HashModel\n\n\nclass Customer(HashModel):\n first_name: str\n last_name: str\n email: EmailStr\n join_date: datetime.date\n age: int\n bio: Optional[str]\n\n\n# First, we create a new `Customer` object:\nandrew = Customer(\n first_name="Andrew",\n last_name="Brookins",\n email="andrew.brookins@example.com",\n join_date=datetime.date.today(),\n age=38,\n bio="Python developer, works at Redis, Inc."\n)\n\n# The model generates a globally unique primary key automatically\n# without needing to talk to Redis.\nprint(andrew.pk)\n# > \'01FJM6PH661HCNNRC884H6K30C\'\n\n# We can save the model to Redis by calling `save()`:\nandrew.save()\n\n# To retrieve this customer with its primary key, we use `Customer.get()`:\nassert Customer.get(andrew.pk) == andrew\n```\n\n**Ready to learn more?** Check out the [getting started](docs/getting_started.md) guide.\n\nOr, continue reading to see how Redis OM makes data validation a snap.\n\n## β Validating Data With Your Model\n\nRedis OM uses [Pydantic][pydantic-url] to validate data based on the type annotations you assign to fields in a model class.\n\nThis validation ensures that fields like `first_name`, which the `Customer` model marked as a `str`, are always strings. 
**But every Redis OM model is also a Pydantic model**, so you can use Pydantic validators like `EmailStr`, `Pattern`, and many more for complex validations!\n\nFor example, because we used the `EmailStr` type for the `email` field, we\'ll get a validation error if we try to create a `Customer` with an invalid email address:\n\n```python\nimport datetime\nfrom typing import Optional\n\nfrom pydantic import EmailStr, ValidationError\n\nfrom aredis_om import HashModel\n\n\nclass Customer(HashModel):\n first_name: str\n last_name: str\n email: EmailStr\n join_date: datetime.date\n age: int\n bio: Optional[str]\n\n\ntry:\n Customer(\n first_name="Andrew",\n last_name="Brookins",\n email="Not an email address!",\n join_date=datetime.date.today(),\n age=38,\n bio="Python developer, works at Redis, Inc."\n )\nexcept ValidationError as e:\n print(e)\n """\n pydantic.error_wrappers.ValidationError: 1 validation error for Customer\n email\n value is not a valid email address (type=value_error.email)\n """\n```\n\n**Any existing Pydantic validator should work** as a drop-in type annotation with a Redis OM model. 
You can also write arbitrarily complex custom validations!\n\nTo learn more, see the [documentation on data validation](docs/validation.md).\n\n## π Rich Queries and Embedded Models\n\nData modeling, validation, and saving models to Redis all work regardless of how you run Redis.\n\nNext, we\'ll show you the **rich query expressions** and **embedded models** Redis OM provides when the [RediSearch][redisearch-url] and [RedisJSON][redis-json-url] modules are installed in your Redis deployment, or you\'re using [Redis Enterprise][redis-enterprise-url].\n\n**TIP**: *Wait, what\'s a Redis module?* If you aren\'t familiar with Redis modules, review the [So, How Do You Get RediSearch and RedisJSON?](#-so-how-do-you-get-redisearch-and-redisjson) section of this README.\n\n### Querying\n\nRedis OM comes with a rich query language that allows you to query Redis with Python expressions.\n\nTo show how this works, we\'ll make a small change to the `Customer` model we defined earlier. We\'ll add `Field(index=True)` to tell Redis OM that we want to index the `last_name` and `age` fields:\n\n```python\nimport datetime\nfrom typing import Optional\n\nfrom pydantic import EmailStr\n\nfrom aredis_om import (\n Field,\n HashModel,\n Migrator\n)\nfrom aredis_om import get_redis_connection\n\n\nclass Customer(HashModel):\n first_name: str\n last_name: str = Field(index=True)\n email: EmailStr\n join_date: datetime.date\n age: int = Field(index=True)\n bio: Optional[str]\n\n\n# Now, if we use this model with a Redis deployment that has the\n# RediSearch module installed, we can run queries like the following.\n\n# Before running queries, we need to run migrations to set up the\n# indexes that Redis OM will use. 
You can also use the `migrate`\n# CLI tool for this!\nredis = get_redis_connection()\nMigrator(redis).run()\n\n# Find all customers with the last name "Brookins"\nCustomer.find(Customer.last_name == "Brookins").all()\n\n# Find all customers that do NOT have the last name "Brookins"\nCustomer.find(Customer.last_name != "Brookins").all()\n\n# Find all customers whose last name is "Brookins" OR whose age is \n# 100 AND whose last name is "Smith"\nCustomer.find((Customer.last_name == "Brookins") | (\n Customer.age == 100\n) & (Customer.last_name == "Smith")).all()\n```\n\nThese queries -- and more! -- are possible because **Redis OM manages indexes for you automatically**.\n\nQuerying with this index features a rich expression syntax inspired by the Django ORM, SQLAlchemy, and Peewee. We think you\'ll enjoy it!\n\nTo learn more about how to query with Redis OM, see the [documentation on querying](docs/querying.md).\n****\n### Embedded Models\n\nRedis OM can store and query **nested models** like any document database, with the speed and power you get from Redis. 
Let\'s see how this works.\n\nIn the next example, we\'ll define a new `Address` model and embed it within the `Customer` model.\n\n```python\nimport datetime\nfrom typing import Optional\n\nfrom aredis_om import (\n EmbeddedJsonModel,\n JsonModel,\n Field,\n Migrator,\n)\nfrom aredis_om import get_redis_connection\n\n\nclass Address(EmbeddedJsonModel):\n address_line_1: str\n address_line_2: Optional[str]\n city: str = Field(index=True)\n state: str = Field(index=True)\n country: str\n postal_code: str = Field(index=True)\n\n\nclass Customer(JsonModel):\n first_name: str = Field(index=True)\n last_name: str = Field(index=True)\n email: str = Field(index=True)\n join_date: datetime.date\n age: int = Field(index=True)\n bio: Optional[str] = Field(index=True, full_text_search=True,\n default="")\n\n # Creates an embedded model.\n address: Address\n\n\n# With these two models and a Redis deployment with the RedisJSON \n# module installed, we can run queries like the following.\n\n# Before running queries, we need to run migrations to set up the\n# indexes that Redis OM will use. You can also use the `migrate`\n# CLI tool for this!\nredis = get_redis_connection()\nMigrator(redis).run()\n\n# Find all customers who live in San Antonio, TX\nCustomer.find(Customer.address.city == "San Antonio",\n Customer.address.state == "TX")\n```\n\nTo learn more, read the [documentation on embedded models](docs/embedded.md).\n\n## π» Installation\n\nInstallation is simple with `pip`, Poetry, or Pipenv.\n\n```sh\n# With pip\n$ pip install redis-om\n\n# Or, using Poetry\n$ poetry add redis-om\n```\n\n## π Documentation\n\nThe Redis OM documentation is available [here](docs/index.md).\n\n## βοΈ Troubleshooting\n\nIf you run into trouble or have any questions, we\'re here to help!\n\nFirst, check the [FAQ](docs/faq.md). 
If you don\'t find the answer there,\nhit us up on the [Redis Discord Server](http://discord.gg/redis).\n\n## β¨ So How Do You Get RediSearch and RedisJSON?\n\nSome advanced features of Redis OM rely on core features from two source available Redis modules: [RediSearch][redisearch-url] and [RedisJSON][redis-json-url].\n\nYou can run these modules in your self-hosted Redis deployment, or you can use [Redis Enterprise][redis-enterprise-url], which includes both modules.\n\nTo learn more, read [our documentation](docs/redis_modules.md).\n\n## β€οΈ Contributing\n\nWe\'d love your contributions!\n\n**Bug reports** are especially helpful at this stage of the project. [You can open a bug report on GitHub](https://github.com/redis-om/redis-om-python/issues/new).\n\nYou can also **contribute documentation** -- or just let us know if something needs more detail. [Open an issue on GitHub](https://github.com/redis-om/redis-om-python/issues/new) to get started.\n\n## π License\n\nRedis OM uses the [BSD 3-Clause license][license-url].\n\n\n\n[version-svg]: https://img.shields.io/pypi/v/redis-om?style=flat-square\n[package-url]: https://pypi.org/project/redis-om/\n[ci-svg]: https://img.shields.io/github/workflow/status/redis-om/redis-om-python/python?style=flat-square\n[ci-url]: https://github.com/redis-om/redis-om-python/actions/workflows/build.yml\n[license-image]: http://img.shields.io/badge/license-MIT-green.svg?style=flat-square\n[license-url]: LICENSE\n\n\n[redis-om-website]: https://developer.redis.com\n[redis-om-js]: https://github.com/redis-om/redis-om-js\n[redis-om-dotnet]: https://github.com/redis-om/redis-om-dotnet\n[redis-om-spring]: https://github.com/redis-om/redis-om-spring\n[redisearch-url]: https://oss.redis.com/redisearch/\n[redis-json-url]: https://oss.redis.com/redisjson/\n[pydantic-url]: https://github.com/samuelcolvin/pydantic\n[ulid-url]: https://github.com/ulid/spec\n[redis-enterprise-url]: https://redis.com/try-free/\n',
+ 'author': 'Andrew Brookins',
+ 'author_email': 'andrew.brookins@redis.com',
+ 'maintainer': 'Andrew Brookins',
+ 'maintainer_email': 'andrew.brookins@redis.com',
+ 'url': 'https://github.com/redis-developer/redis-om-python',
+ 'packages': packages,
+ 'package_data': package_data,
+ 'install_requires': install_requires,
+ 'entry_points': entry_points,
+ 'python_requires': '>=3.7,<4.0',
+}
+from build import *
+build(setup_kwargs)
+
+setup(**setup_kwargs)
diff --git a/tests/conftest.py b/tests/conftest.py
index ebb25c7..2a7c799 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,12 +1,24 @@
+import asyncio
import random
import pytest
-from redis_om.connections import get_redis_connection
+from aredis_om import get_redis_connection
-@pytest.fixture
-def redis(event_loop):
+@pytest.fixture(scope="session")
+def event_loop(request):
+ """
+ Starlette needs a session-scoped event loop during test runs.
+ https://github.com/pytest-dev/pytest-asyncio/issues/169
+ """
+ loop = asyncio.get_event_loop_policy().new_event_loop()
+ yield loop
+ loop.close()
+
+
+@pytest.fixture(scope="session")
+def redis():
yield get_redis_connection()
diff --git a/tests/test_hash_model.py b/tests/test_hash_model.py
index e67a42e..f3da1d3 100644
--- a/tests/test_hash_model.py
+++ b/tests/test_hash_model.py
@@ -8,20 +8,24 @@ from unittest import mock
import pytest
from pydantic import ValidationError
-from redis_om.model import Field, HashModel
-from redis_om.model.migrations.migrator import Migrator
-from redis_om.model.model import (
- NotFoundError,
+from aredis_om import (
+ Field,
+ HashModel,
+ Migrator,
QueryNotSupportedError,
RedisModelError,
+ has_redisearch,
)
+if not has_redisearch():
+ pytestmark = pytest.mark.skip
+
today = datetime.date.today()
@pytest.fixture
-def m(key_prefix):
+async def m(key_prefix, redis):
class BaseHashModel(HashModel, abc.ABC):
class Meta:
global_key_prefix = key_prefix
@@ -42,7 +46,7 @@ def m(key_prefix):
model_key_prefix = "member"
primary_key_pattern = ""
- Migrator().run()
+ await Migrator(redis).run()
return namedtuple("Models", ["BaseHashModel", "Order", "Member"])(
BaseHashModel, Order, Member
@@ -50,7 +54,7 @@ def m(key_prefix):
@pytest.fixture
-def members(m):
+async def members(m):
member1 = m.Member(
first_name="Andrew",
last_name="Brookins",
@@ -74,149 +78,32 @@ def members(m):
age=100,
join_date=today,
)
- member1.save()
- member2.save()
- member3.save()
+ await member1.save()
+ await member2.save()
+ await member3.save()
yield member1, member2, member3
-def test_validates_required_fields(m):
- # Raises ValidationError: last_name is required
- with pytest.raises(ValidationError):
- m.Member(first_name="Andrew", zipcode="97086", join_date=today)
-
-
-def test_validates_field(m):
- # Raises ValidationError: join_date is not a date
- with pytest.raises(ValidationError):
- m.Member(first_name="Andrew", last_name="Brookins", join_date="yesterday")
-
-
-# Passes validation
-def test_validation_passes(m):
- member = m.Member(
- first_name="Andrew",
- last_name="Brookins",
- email="a@example.com",
- join_date=today,
- age=38,
- )
- assert member.first_name == "Andrew"
-
-
-def test_saves_model_and_creates_pk(m):
- member = m.Member(
- first_name="Andrew",
- last_name="Brookins",
- email="a@example.com",
- join_date=today,
- age=38,
- )
- # Save a model instance to Redis
- member.save()
-
- member2 = m.Member.get(member.pk)
- assert member2 == member
-
-
-def test_raises_error_with_embedded_models(m):
- class Address(m.BaseHashModel):
- address_line_1: str
- address_line_2: Optional[str]
- city: str
- country: str
- postal_code: str
-
- with pytest.raises(RedisModelError):
-
- class InvalidMember(m.BaseHashModel):
- address: Address
-
-
-@pytest.mark.skip("Not implemented yet")
-def test_saves_many(m):
- members = [
- m.Member(
- first_name="Andrew",
- last_name="Brookins",
- email="a@example.com",
- join_date=today,
- ),
- m.Member(
- first_name="Kim",
- last_name="Brookins",
- email="k@example.com",
- join_date=today,
- ),
- ]
- m.Member.add(members)
-
-
-@pytest.mark.skip("Not ready yet")
-def test_updates_a_model(members, m):
+@pytest.mark.asyncio
+async def test_exact_match_queries(members, m):
member1, member2, member3 = members
- # Or, with an implicit save:
- member1.update(last_name="Smith")
- assert m.Member.find(m.Member.pk == member1.pk).first() == member1
-
- # Or, affecting multiple model instances with an implicit save:
- m.Member.find(m.Member.last_name == "Brookins").update(last_name="Smith")
- results = m.Member.find(m.Member.last_name == "Smith")
- assert results == members
-
-
-def test_paginate_query(members, m):
- member1, member2, member3 = members
- actual = m.Member.find().sort_by("age").all(batch_size=1)
- assert actual == [member2, member1, member3]
-
-
-def test_access_result_by_index_cached(members, m):
- member1, member2, member3 = members
- query = m.Member.find().sort_by("age")
- # Load the cache, throw away the result.
- assert query._model_cache == []
- query.execute()
- assert query._model_cache == [member2, member1, member3]
-
- # Access an item that should be in the cache.
- with mock.patch.object(query.model, "db") as mock_db:
- assert query[0] == member2
- assert not mock_db.called
-
-
-def test_access_result_by_index_not_cached(members, m):
- member1, member2, member3 = members
- query = m.Member.find().sort_by("age")
-
- # Assert that we don't have any models in the cache yet -- we
- # haven't made any requests of Redis.
- assert query._model_cache == []
- assert query[0] == member2
- assert query[1] == member1
- assert query[2] == member3
-
-
-def test_exact_match_queries(members, m):
- member1, member2, member3 = members
-
- actual = m.Member.find(m.Member.last_name == "Brookins").sort_by("age").all()
+ actual = await m.Member.find(m.Member.last_name == "Brookins").sort_by("age").all()
assert actual == [member2, member1]
- actual = m.Member.find(
+ actual = await m.Member.find(
(m.Member.last_name == "Brookins") & ~(m.Member.first_name == "Andrew")
).all()
assert actual == [member2]
- actual = m.Member.find(~(m.Member.last_name == "Brookins")).all()
+ actual = await m.Member.find(~(m.Member.last_name == "Brookins")).all()
assert actual == [member3]
- actual = m.Member.find(m.Member.last_name != "Brookins").all()
+ actual = await m.Member.find(m.Member.last_name != "Brookins").all()
assert actual == [member3]
- actual = (
+ actual = await (
m.Member.find(
(m.Member.last_name == "Brookins") & (m.Member.first_name == "Andrew")
| (m.Member.first_name == "Kim")
@@ -226,16 +113,17 @@ def test_exact_match_queries(members, m):
)
assert actual == [member2, member1]
- actual = m.Member.find(
+ actual = await m.Member.find(
m.Member.first_name == "Kim", m.Member.last_name == "Brookins"
).all()
assert actual == [member2]
-def test_recursive_query_resolution(members, m):
+@pytest.mark.asyncio
+async def test_recursive_query_resolution(members, m):
member1, member2, member3 = members
- actual = (
+ actual = await (
m.Member.find(
(m.Member.last_name == "Brookins")
| (m.Member.age == 100) & (m.Member.last_name == "Smith")
@@ -246,10 +134,11 @@ def test_recursive_query_resolution(members, m):
assert actual == [member2, member1, member3]
-def test_tag_queries_boolean_logic(members, m):
+@pytest.mark.asyncio
+async def test_tag_queries_boolean_logic(members, m):
member1, member2, member3 = members
- actual = (
+ actual = await (
m.Member.find(
(m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins")
| (m.Member.last_name == "Smith")
@@ -260,7 +149,8 @@ def test_tag_queries_boolean_logic(members, m):
assert actual == [member1, member3]
-def test_tag_queries_punctuation(m):
+@pytest.mark.asyncio
+async def test_tag_queries_punctuation(m):
member1 = m.Member(
first_name="Andrew, the Michael",
last_name="St. Brookins-on-Pier",
@@ -268,7 +158,7 @@ def test_tag_queries_punctuation(m):
age=38,
join_date=today,
)
- member1.save()
+ await member1.save()
member2 = m.Member(
first_name="Bob",
@@ -277,24 +167,26 @@ def test_tag_queries_punctuation(m):
age=38,
join_date=today,
)
- member2.save()
+ await member2.save()
- assert (
- m.Member.find(m.Member.first_name == "Andrew, the Michael").first() == member1
- )
- assert (
- m.Member.find(m.Member.last_name == "St. Brookins-on-Pier").first() == member1
- )
+ result = await (m.Member.find(m.Member.first_name == "Andrew, the Michael").first())
+ assert result == member1
+
+ result = await (m.Member.find(m.Member.last_name == "St. Brookins-on-Pier").first())
+ assert result == member1
# Notice that when we index and query multiple values that use the internal
# TAG separator for single-value exact-match fields, like an indexed string,
# the queries will succeed. We apply a workaround that queries for the union
# of the two values separated by the tag separator.
- assert m.Member.find(m.Member.email == "a|b@example.com").all() == [member1]
- assert m.Member.find(m.Member.email == "a|villain@example.com").all() == [member2]
+ results = await m.Member.find(m.Member.email == "a|b@example.com").all()
+ assert results == [member1]
+ results = await m.Member.find(m.Member.email == "a|villain@example.com").all()
+ assert results == [member2]
-def test_tag_queries_negation(members, m):
+@pytest.mark.asyncio
+async def test_tag_queries_negation(members, m):
member1, member2, member3 = members
"""
@@ -304,7 +196,7 @@ def test_tag_queries_negation(members, m):
"""
query = m.Member.find(~(m.Member.first_name == "Andrew"))
- assert query.all() == [member2]
+ assert await query.all() == [member2]
"""
βfirst_name
@@ -319,7 +211,7 @@ def test_tag_queries_negation(members, m):
query = m.Member.find(
~(m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins")
)
- assert query.all() == [member2]
+ assert await query.all() == [member2]
"""
βfirst_name
@@ -338,7 +230,7 @@ def test_tag_queries_negation(members, m):
~(m.Member.first_name == "Andrew")
& ((m.Member.last_name == "Brookins") | (m.Member.last_name == "Smith"))
)
- assert query.all() == [member2]
+ assert await query.all() == [member2]
"""
βfirst_name
@@ -357,64 +249,184 @@ def test_tag_queries_negation(members, m):
~(m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins")
| (m.Member.last_name == "Smith")
)
- assert query.sort_by("age").all() == [member2, member3]
+ assert await query.sort_by("age").all() == [member2, member3]
- actual = m.Member.find(
+ actual = await m.Member.find(
(m.Member.first_name == "Andrew") & ~(m.Member.last_name == "Brookins")
).all()
assert actual == [member3]
-def test_numeric_queries(members, m):
+@pytest.mark.asyncio
+async def test_numeric_queries(members, m):
member1, member2, member3 = members
- actual = m.Member.find(m.Member.age == 34).all()
+ actual = await m.Member.find(m.Member.age == 34).all()
assert actual == [member2]
- actual = m.Member.find(m.Member.age > 34).sort_by("age").all()
+ actual = await m.Member.find(m.Member.age > 34).sort_by("age").all()
assert actual == [member1, member3]
- actual = m.Member.find(m.Member.age < 35).all()
+ actual = await m.Member.find(m.Member.age < 35).all()
assert actual == [member2]
- actual = m.Member.find(m.Member.age <= 34).all()
+ actual = await m.Member.find(m.Member.age <= 34).all()
assert actual == [member2]
- actual = m.Member.find(m.Member.age >= 100).all()
+ actual = await m.Member.find(m.Member.age >= 100).all()
assert actual == [member3]
- actual = m.Member.find(m.Member.age != 34).sort_by("age").all()
+ actual = await m.Member.find(m.Member.age != 34).sort_by("age").all()
assert actual == [member1, member3]
- actual = m.Member.find(~(m.Member.age == 100)).sort_by("age").all()
+ actual = await m.Member.find(~(m.Member.age == 100)).sort_by("age").all()
assert actual == [member2, member1]
- actual = m.Member.find(m.Member.age > 30, m.Member.age < 40).sort_by("age").all()
+ actual = (
+ await m.Member.find(m.Member.age > 30, m.Member.age < 40).sort_by("age").all()
+ )
assert actual == [member2, member1]
-def test_sorting(members, m):
+@pytest.mark.asyncio
+async def test_sorting(members, m):
member1, member2, member3 = members
- actual = m.Member.find(m.Member.age > 34).sort_by("age").all()
+ actual = await m.Member.find(m.Member.age > 34).sort_by("age").all()
assert actual == [member1, member3]
- actual = m.Member.find(m.Member.age > 34).sort_by("-age").all()
+ actual = await m.Member.find(m.Member.age > 34).sort_by("-age").all()
assert actual == [member3, member1]
with pytest.raises(QueryNotSupportedError):
# This field does not exist.
- m.Member.find().sort_by("not-a-real-field").all()
+ await m.Member.find().sort_by("not-a-real-field").all()
with pytest.raises(QueryNotSupportedError):
# This field is not sortable.
- m.Member.find().sort_by("join_date").all()
+ await m.Member.find().sort_by("join_date").all()
-def test_not_found(m):
- with pytest.raises(NotFoundError):
- # This ID does not exist.
- m.Member.get(1000)
+def test_validates_required_fields(m):
+ # Raises ValidationError: last_name is required
+ # TODO: Test the error value
+ with pytest.raises(ValidationError):
+ m.Member(first_name="Andrew", zipcode="97086", join_date=today)
+
+
+def test_validates_field(m):
+ # Raises ValidationError: join_date is not a date
+ # TODO: Test the error value
+ with pytest.raises(ValidationError):
+ m.Member(first_name="Andrew", last_name="Brookins", join_date="yesterday")
+
+
+def test_validation_passes(m):
+ member = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date=today,
+ age=38,
+ )
+ assert member.first_name == "Andrew"
+
+
+@pytest.mark.asyncio
+async def test_saves_model_and_creates_pk(m):
+ member = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date=today,
+ age=38,
+ )
+ # Save a model instance to Redis
+ await member.save()
+
+ member2 = await m.Member.get(member.pk)
+ assert member2 == member
+
+
+def test_raises_error_with_embedded_models(m):
+ class Address(m.BaseHashModel):
+ address_line_1: str
+ address_line_2: Optional[str]
+ city: str
+ country: str
+ postal_code: str
+
+ with pytest.raises(RedisModelError):
+
+ class InvalidMember(m.BaseHashModel):
+ address: Address
+
+
+@pytest.mark.asyncio
+async def test_saves_many(m):
+ member1 = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date=today,
+ age=38,
+ )
+ member2 = m.Member(
+ first_name="Kim",
+ last_name="Brookins",
+ email="k@example.com",
+ join_date=today,
+ age=34,
+ )
+ members = [member1, member2]
+ result = await m.Member.add(members)
+ assert result == [member1, member2]
+
+ assert await m.Member.get(pk=member1.pk) == member1
+ assert await m.Member.get(pk=member2.pk) == member2
+
+
+@pytest.mark.asyncio
+async def test_updates_a_model(members, m):
+ member1, member2, member3 = members
+ await member1.update(last_name="Smith")
+ member = await m.Member.get(member1.pk)
+ assert member.last_name == "Smith"
+
+
+@pytest.mark.asyncio
+async def test_paginate_query(members, m):
+ member1, member2, member3 = members
+ actual = await m.Member.find().sort_by("age").all(batch_size=1)
+ assert actual == [member2, member1, member3]
+
+
+@pytest.mark.asyncio
+async def test_access_result_by_index_cached(members, m):
+ member1, member2, member3 = members
+ query = m.Member.find().sort_by("age")
+ # Load the cache, throw away the result.
+ assert query._model_cache == []
+ await query.execute()
+ assert query._model_cache == [member2, member1, member3]
+
+ # Access an item that should be in the cache.
+ with mock.patch.object(query.model, "db") as mock_db:
+ assert await query.get_item(0) == member2
+ assert not mock_db.called
+
+
+@pytest.mark.asyncio
+async def test_access_result_by_index_not_cached(members, m):
+ member1, member2, member3 = members
+ query = m.Member.find().sort_by("age")
+
+ # Assert that we don't have any models in the cache yet -- we
+ # haven't made any requests of Redis.
+ assert query._model_cache == []
+ assert await query.get_item(0) == member2
+ assert await query.get_item(1) == member1
+ assert await query.get_item(2) == member3
def test_schema(m, key_prefix):
diff --git a/tests/test_json_model.py b/tests/test_json_model.py
index b09a4de..91fc918 100644
--- a/tests/test_json_model.py
+++ b/tests/test_json_model.py
@@ -1,5 +1,4 @@
import abc
-import asyncio
import datetime
import decimal
from collections import namedtuple
@@ -9,15 +8,21 @@ from unittest import mock
import pytest
from pydantic import ValidationError
-from redis_om.model import EmbeddedJsonModel, Field, JsonModel
-from redis_om.model.migrations.migrator import Migrator
-from redis_om.model.model import (
+from aredis_om import (
+ EmbeddedJsonModel,
+ Field,
+ JsonModel,
+ Migrator,
NotFoundError,
QueryNotSupportedError,
RedisModelError,
+ has_redis_json,
)
+if not has_redis_json():
+ pytestmark = pytest.mark.skip
+
today = datetime.date.today()
@@ -160,7 +165,7 @@ async def test_validation_passes(address, m):
@pytest.mark.asyncio
async def test_saves_model_and_creates_pk(address, m, redis):
await Migrator(redis).run()
-
+
member = m.Member(
first_name="Andrew",
last_name="Brookins",
@@ -177,28 +182,66 @@ async def test_saves_model_and_creates_pk(address, m, redis):
assert member2.address == address
-@pytest.mark.skip("Not implemented yet")
@pytest.mark.asyncio
-async def test_saves_many(address, m):
- members = [
- m.Member(
- first_name="Andrew",
- last_name="Brookins",
- email="a@example.com",
- join_date=today,
- address=address,
- age=38,
- ),
- m.Member(
- first_name="Kim",
- last_name="Brookins",
- email="k@example.com",
- join_date=today,
- address=address,
- age=34,
- ),
- ]
- m.Member.add(members)
+async def test_saves_many_implicit_pipeline(address, m):
+ member1 = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date=today,
+ address=address,
+ age=38,
+ )
+ member2 = m.Member(
+ first_name="Kim",
+ last_name="Brookins",
+ email="k@example.com",
+ join_date=today,
+ address=address,
+ age=34,
+ )
+ members = [member1, member2]
+ result = await m.Member.add(members)
+ assert result == [member1, member2]
+
+ assert await m.Member.get(pk=member1.pk) == member1
+ assert await m.Member.get(pk=member2.pk) == member2
+
+
+@pytest.mark.asyncio
+async def test_saves_many_explicit_transaction(address, m):
+ member1 = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date=today,
+ address=address,
+ age=38,
+ )
+ member2 = m.Member(
+ first_name="Kim",
+ last_name="Brookins",
+ email="k@example.com",
+ join_date=today,
+ address=address,
+ age=34,
+ )
+ members = [member1, member2]
+ result = await m.Member.add(members)
+ assert result == [member1, member2]
+
+ assert await m.Member.get(pk=member1.pk) == member1
+ assert await m.Member.get(pk=member2.pk) == member2
+
+ # Test the explicit pipeline path -- here, we add multiple Members
+ # using a single Redis transaction, with MULTI/EXEC.
+ async with m.Member.db().pipeline(transaction=True) as pipeline:
+        result = await m.Member.add(members, pipeline=pipeline)
+        assert result == [member1, member2]
+ assert await pipeline.execute() == ["OK", "OK"]
+
+ assert await m.Member.get(pk=member1.pk) == member1
+ assert await m.Member.get(pk=member2.pk) == member2
async def save(members):
@@ -207,25 +250,19 @@ async def save(members):
return members
-@pytest.mark.skip("Not ready yet")
@pytest.mark.asyncio
async def test_updates_a_model(members, m):
member1, member2, member3 = await save(members)
- # Or, with an implicit save:
- member1.update(last_name="Smith")
- assert m.Member.find(m.Member.pk == member1.pk).first() == member1
+ # Update a field directly on the model
+ await member1.update(last_name="Apples to oranges")
+ member = await m.Member.get(member1.pk)
+ assert member.last_name == "Apples to oranges"
- # Or, affecting multiple model instances with an implicit save:
- m.Member.find(m.Member.last_name == "Brookins").update(last_name="Smith")
- results = m.Member.find(m.Member.last_name == "Smith")
- assert results == members
-
- # Or, updating a field in an embedded model:
- member2.update(address__city="Happy Valley")
- assert (
- m.Member.find(m.Member.pk == member2.pk).first().address.city == "Happy Valley"
- )
+ # Update a field in an embedded model
+ await member2.update(address__city="Happy Valley")
+ member = await m.Member.get(member2.pk)
+ assert member.address.city == "Happy Valley"
@pytest.mark.asyncio
@@ -246,7 +283,7 @@ async def test_access_result_by_index_cached(members, m):
# Access an item that should be in the cache.
with mock.patch.object(query.model, "db") as mock_db:
- assert query[0] == member2
+ assert await query.get_item(0) == member2
assert not mock_db.called
@@ -258,9 +295,9 @@ async def test_access_result_by_index_not_cached(members, m):
# Assert that we don't have any models in the cache yet -- we
# haven't made any requests of Redis.
assert query._model_cache == []
- assert query.get_item(0) == member2
- assert query.get_item(1) == member1
- assert query.get_item(2) == member3
+ assert await query.get_item(0) == member2
+ assert await query.get_item(1) == member1
+ assert await query.get_item(2) == member3
@pytest.mark.asyncio
@@ -274,7 +311,6 @@ async def test_in_query(members, m):
assert actual == [member2, member1, member3]
-@pytest.mark.skip("Not implemented yet")
@pytest.mark.asyncio
async def test_update_query(members, m):
member1, member2, member3 = members
@@ -286,8 +322,8 @@ async def test_update_query(members, m):
.sort_by("age")
.all()
)
- assert actual == [member1, member2, member3]
- assert all([m.name == "Bobby" for m in actual])
+ assert len(actual) == 3
+    assert all(member.first_name == "Bobby" for member in actual)
@pytest.mark.asyncio
@@ -323,7 +359,9 @@ async def test_exact_match_queries(members, m):
).all()
assert actual == [member2]
- actual = await m.Member.find(m.Member.address.city == "Portland").sort_by("age").all()
+ actual = (
+ await m.Member.find(m.Member.address.city == "Portland").sort_by("age").all()
+ )
assert actual == [member2, member1, member3]
@@ -349,7 +387,9 @@ async def test_recursive_query_field_resolution(members, m):
description="Weird house", created_on=datetime.datetime.now()
)
await member1.save()
- actual = await m.Member.find(m.Member.address.note.description == "Weird house").all()
+ actual = await m.Member.find(
+ m.Member.address.note.description == "Weird house"
+ ).all()
assert actual == [member1]
member1.orders = [
@@ -416,10 +456,12 @@ async def test_tag_queries_punctuation(address, m):
await member2.save()
assert (
- await m.Member.find(m.Member.first_name == "Andrew, the Michael").first() == member1
+ await m.Member.find(m.Member.first_name == "Andrew, the Michael").first()
+ == member1
)
assert (
- await m.Member.find(m.Member.last_name == "St. Brookins-on-Pier").first() == member1
+ await m.Member.find(m.Member.last_name == "St. Brookins-on-Pier").first()
+ == member1
)
# Notice that when we index and query multiple values that use the internal
@@ -427,7 +469,9 @@ async def test_tag_queries_punctuation(address, m):
# the queries will succeed. We apply a workaround that queries for the union
# of the two values separated by the tag separator.
assert await m.Member.find(m.Member.email == "a|b@example.com").all() == [member1]
- assert await m.Member.find(m.Member.email == "a|villain@example.com").all() == [member2]
+ assert await m.Member.find(m.Member.email == "a|villain@example.com").all() == [
+ member2
+ ]
@pytest.mark.asyncio
@@ -509,7 +553,7 @@ async def test_numeric_queries(members, m):
actual = await m.Member.find(m.Member.age == 34).all()
assert actual == [member2]
- actual = await m.Member.find(m.Member.age > 34).all()
+ actual = await m.Member.find(m.Member.age > 34).sort_by("age").all()
assert actual == [member1, member3]
actual = await m.Member.find(m.Member.age < 35).all()
@@ -524,7 +568,9 @@ async def test_numeric_queries(members, m):
actual = await m.Member.find(~(m.Member.age == 100)).sort_by("age").all()
assert actual == [member2, member1]
- actual = await m.Member.find(m.Member.age > 30, m.Member.age < 40).sort_by("age").all()
+ actual = (
+ await m.Member.find(m.Member.age > 30, m.Member.age < 40).sort_by("age").all()
+ )
assert actual == [member2, member1]
actual = await m.Member.find(m.Member.age != 34).sort_by("age").all()
diff --git a/tests/test_oss_redis_features.py b/tests/test_oss_redis_features.py
new file mode 100644
index 0000000..9c74370
--- /dev/null
+++ b/tests/test_oss_redis_features.py
@@ -0,0 +1,175 @@
+import abc
+import datetime
+import decimal
+from collections import namedtuple
+from typing import Optional
+
+import pytest
+from pydantic import ValidationError
+
+from aredis_om import HashModel, Migrator, NotFoundError, RedisModelError
+
+
+today = datetime.date.today()
+
+
+@pytest.fixture
+async def m(key_prefix, redis):
+ class BaseHashModel(HashModel, abc.ABC):
+ class Meta:
+ global_key_prefix = key_prefix
+
+ class Order(BaseHashModel):
+ total: decimal.Decimal
+ currency: str
+ created_on: datetime.datetime
+
+ class Member(BaseHashModel):
+ first_name: str
+ last_name: str
+ email: str
+ join_date: datetime.date
+ age: int
+
+ class Meta:
+ model_key_prefix = "member"
+ primary_key_pattern = ""
+
+ await Migrator(redis).run()
+
+ return namedtuple("Models", ["BaseHashModel", "Order", "Member"])(
+ BaseHashModel, Order, Member
+ )
+
+
+@pytest.fixture
+async def members(m):
+ member1 = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ age=38,
+ join_date=today,
+ )
+
+ member2 = m.Member(
+ first_name="Kim",
+ last_name="Brookins",
+ email="k@example.com",
+ age=34,
+ join_date=today,
+ )
+
+ member3 = m.Member(
+ first_name="Andrew",
+ last_name="Smith",
+ email="as@example.com",
+ age=100,
+ join_date=today,
+ )
+ await member1.save()
+ await member2.save()
+ await member3.save()
+
+ yield member1, member2, member3
+
+
+@pytest.mark.asyncio
+async def test_all_keys(members, m):
+ pks = sorted([pk async for pk in await m.Member.all_pks()])
+ assert len(pks) == 3
+ assert pks == sorted([m.pk for m in members])
+
+
+@pytest.mark.asyncio
+async def test_not_found(m):
+ with pytest.raises(NotFoundError):
+ # This ID does not exist.
+ await m.Member.get(1000)
+
+
+def test_validates_required_fields(m):
+ # Raises ValidationError: last_name is required
+ # TODO: Test the error value
+ with pytest.raises(ValidationError):
+ m.Member(first_name="Andrew", zipcode="97086", join_date=today)
+
+
+def test_validates_field(m):
+ # Raises ValidationError: join_date is not a date
+ # TODO: Test the error value
+ with pytest.raises(ValidationError):
+ m.Member(first_name="Andrew", last_name="Brookins", join_date="yesterday")
+
+
+def test_validation_passes(m):
+ member = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date=today,
+ age=38,
+ )
+ assert member.first_name == "Andrew"
+
+
+@pytest.mark.asyncio
+async def test_saves_model_and_creates_pk(m):
+ member = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date=today,
+ age=38,
+ )
+ # Save a model instance to Redis
+ await member.save()
+
+ member2 = await m.Member.get(member.pk)
+ assert member2 == member
+
+
+def test_raises_error_with_embedded_models(m):
+ class Address(m.BaseHashModel):
+ address_line_1: str
+ address_line_2: Optional[str]
+ city: str
+ country: str
+ postal_code: str
+
+ with pytest.raises(RedisModelError):
+
+ class InvalidMember(m.BaseHashModel):
+ address: Address
+
+
+@pytest.mark.asyncio
+async def test_saves_many(m):
+ member1 = m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="a@example.com",
+ join_date=today,
+ age=38,
+ )
+ member2 = m.Member(
+ first_name="Kim",
+ last_name="Brookins",
+ email="k@example.com",
+ join_date=today,
+ age=34,
+ )
+ members = [member1, member2]
+ result = await m.Member.add(members)
+ assert result == [member1, member2]
+
+ assert await m.Member.get(pk=member1.pk) == member1
+ assert await m.Member.get(pk=member2.pk) == member2
+
+
+@pytest.mark.asyncio
+async def test_updates_a_model(members, m):
+ member1, member2, member3 = members
+ await member1.update(last_name="Smith")
+ member = await m.Member.get(member1.pk)
+ assert member.last_name == "Smith"
diff --git a/tests/test_pydantic_integrations.py b/tests/test_pydantic_integrations.py
new file mode 100644
index 0000000..921c1fe
--- /dev/null
+++ b/tests/test_pydantic_integrations.py
@@ -0,0 +1,49 @@
+import abc
+import datetime
+from collections import namedtuple
+
+import pytest
+from pydantic import EmailStr, ValidationError
+
+from aredis_om import Field, HashModel, Migrator
+
+
+today = datetime.date.today()
+
+
+@pytest.fixture
+async def m(key_prefix, redis):
+ class BaseHashModel(HashModel, abc.ABC):
+ class Meta:
+ global_key_prefix = key_prefix
+
+ class Member(BaseHashModel):
+ first_name: str
+ last_name: str
+ email: EmailStr = Field(index=True)
+ join_date: datetime.date
+ age: int
+
+ await Migrator(redis).run()
+
+ return namedtuple("Models", ["Member"])(Member)
+
+
+def test_email_str(m):
+ with pytest.raises(ValidationError):
+ m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="not an email!",
+ age=38,
+ join_date=today,
+ )
+
+ with pytest.raises(ValidationError):
+ m.Member(
+ first_name="Andrew",
+ last_name="Brookins",
+ email="andrew@bad-domain",
+ age=38,
+ join_date=today,
+ )
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..6ec4fbf
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,9 @@
+[tox]
+skipsdist = true
+envlist = py37, py38, py39, py310
+
+[testenv]
+whitelist_externals = poetry
+commands =
+ poetry install -v
+ poetry run pytest