Merge branch 'main' into asyncio

Commit ca6ae7d6e9 by Andrew Brookins, 2021-11-09 15:59:10 -08:00
47 changed files with 3285 additions and 760 deletions


@@ -12,7 +12,7 @@ on:
      - '[0-9].[0-9]+'
      - 'update/pre-commit-autoupdate'
  schedule:
    - cron: '0 6 * * *'  # Daily 6AM UTC build

jobs:
@@ -22,63 +22,57 @@ jobs:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - name: Checkout
-       uses: actions/checkout@v2.3.4
        uses: actions/checkout@v2.3.5
      - name: Setup Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      #----------------------------------------------
      #  ----- install & configure poetry  -----
      #----------------------------------------------
      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          virtualenvs-create: true
          virtualenvs-in-project: true
          installer-parallel: true
      #----------------------------------------------
      # load cached venv if cache exists
      #----------------------------------------------
      - name: Load cached venv
        id: cached-poetry-dependencies
        uses: actions/cache@v2
        with:
          path: .venv
          key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
      #----------------------------------------------
      # install dependencies if cache does not exist
      #----------------------------------------------
      - name: Install dependencies
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry install --no-interaction --no-root
      #----------------------------------------------
      # install your root project, if required
      #----------------------------------------------
      - name: Install library
        run: poetry install --no-interaction
      #----------------------------------------------
      # run test suite
      #----------------------------------------------
      - name: Run linter
        run: |
          make dist
          make lint
-     # - name: Prepare twine checker
-     #   run: |
-     #     pip install -U twine wheel
-     #     python setup.py sdist bdist_wheel
-     # - name: Run twine checker
-     #   run: |
-     #     twine check dist/*

  test-unix:
    name: Test Unix
    needs: lint
    strategy:
      matrix:
-       os: [ubuntu-latest]
-       pyver: [3.6, 3.7, 3.8, 3.9, pypy3]
-       redismod: ["edge", "preview"]
        os: [ ubuntu-latest ]
        pyver: [ 3.6, 3.7, 3.8, 3.9, pypy3 ]
        redismod: [ "preview" ]
      fail-fast: false
    services:
      redis:
@@ -98,75 +92,95 @@ jobs:
      OS: ${{ matrix.os }}
      INSTALL_DIR: ${{ github.workspace }}/redis
    steps:
      - name: Checkout
-       uses: actions/checkout@v2.3.4
        uses: actions/checkout@v2.3.5
      - name: Setup Python ${{ matrix.pyver }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.pyver }}
      #----------------------------------------------
      #  ----- install & configure poetry  -----
      #----------------------------------------------
      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          virtualenvs-create: true
          virtualenvs-in-project: true
          installer-parallel: true
      #----------------------------------------------
      # load cached venv if cache exists
      #----------------------------------------------
      - name: Load cached venv
        id: cached-poetry-dependencies
        uses: actions/cache@v2
        with:
          path: .venv
          key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
      #----------------------------------------------
      # install dependencies if cache does not exist
      #----------------------------------------------
      - name: Install dependencies
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry install --no-interaction --no-root
      #----------------------------------------------
      # install your root project, if required
      #----------------------------------------------
      - name: Install library
        run: poetry install --no-interaction
      - name: Run unittests (redismod:${{ matrix.redismod }}, ${{ matrix.os }})
        run: |
          make test
          poetry run coverage xml
      - name: Upload coverage
        uses: codecov/codecov-action@v2.1.0
        with:
          file: ./coverage.xml
          flags: unit
          env_vars: OS
          fail_ci_if_error: false

  deploy:
    name: Deploy
    runs-on: ubuntu-latest
    needs: test-unix
    # Run only on pushing a tag
    if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
    steps:
      - name: Checkout
-       uses: actions/checkout@v2.3.4
        uses: actions/checkout@v2.3.5
      - name: Setup Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
-     - name: Install dependencies
-       run:
-         python -m pip install -U pip wheel twine
-     - name: Make dists
-       run:
-         python setup.py sdist bdist_wheel
-     - name: PyPI upload
-       env:
-         TWINE_USERNAME: __token__
-         TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
-       run: |
-         twine upload dist/*
      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          virtualenvs-create: true
          virtualenvs-in-project: true
          installer-parallel: true
      #----------------------------------------------
      # load cached venv if cache exists
      #----------------------------------------------
      - name: Load cached venv
        id: cached-poetry-dependencies
        uses: actions/cache@v2
        with:
          path: .venv
          key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
      #----------------------------------------------
      # install dependencies if cache does not exist
      #----------------------------------------------
      - name: Install dependencies
        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
        run: poetry install --no-interaction --no-root
      #----------------------------------------------
      # install your root project, if required
      #----------------------------------------------
      - name: Install library
        run: poetry install --no-interaction
      - name: PyPI upload
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
        run: |
          make upload

.gitignore

@@ -128,3 +128,9 @@ dmypy.json
# Pyre type checker
.pyre/
data
# Makefile install checker
.install.stamp
# Sync version of the library, via Unasync
redis_om/


LICENSE

@ -1,21 +1,26 @@
The MIT License (MIT) Copyright 2021 Redis, Inc.
Copyright (c) 2021-present Redis, Inc. Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Permission is hereby granted, free of charge, to any person obtaining a copy 1. Redistributions of source code must retain the above copyright notice, this
of this software and associated documentation files (the "Software"), to deal list of conditions and the following disclaimer.
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all 2. Redistributions in binary form must reproduce the above copyright notice,
copies or substantial portions of the Software. this list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 3. Neither the name of the copyright holder nor the names of its contributors
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, may be used to endorse or promote products derived from this software without
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE specific prior written permission.
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
SOFTWARE. WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


@@ -1,4 +1,5 @@
-NAME := redis_om
NAME := aredis_om
SYNC_NAME := redis_om
INSTALL_STAMP := .install.stamp
POETRY := $(shell command -v poetry 2> /dev/null)
@@ -19,7 +20,7 @@ help:
        @echo "Check the Makefile to know exactly what each target is doing."

install: $(INSTALL_STAMP)
-$(INSTALL_STAMP): pyproject.toml poetry.lock
$(INSTALL_STAMP): pyproject.toml
        @if [ -z $(POETRY) ]; then echo "Poetry could not be found. See https://python-poetry.org/docs/"; exit 2; fi
        $(POETRY) install
        touch $(INSTALL_STAMP)
@@ -28,23 +29,48 @@ $(INSTALL_STAMP): pyproject.toml poetry.lock
clean:
        find . -type d -name "__pycache__" | xargs rm -rf {};
        rm -rf $(INSTALL_STAMP) .coverage .mypy_cache
        rm -rf build
        rm -rf dist
        rm -rf redis_om

.PHONY: dist
dist: $(INSTALL_STAMP) clean sync
        $(POETRY) build

.PHONY: sync
sync: $(INSTALL_STAMP)
        $(POETRY) run python make_sync.py

.PHONY: upload
upload: dist
        $(POETRY) run twine upload dist/*

.PHONY: lint
-lint: $(INSTALL_STAMP)
lint: $(INSTALL_STAMP) dist
-       $(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME)
        $(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME) $(SYNC_NAME)
        $(POETRY) run black ./tests/ $(NAME)
-       $(POETRY) run flake8 --ignore=W503,E501,F401,E731 ./tests/ $(NAME)
        $(POETRY) run flake8 --ignore=W503,E501,F401,E731 ./tests/ $(NAME) $(SYNC_NAME)
-       $(POETRY) run mypy ./tests/ $(NAME) --ignore-missing-imports
        $(POETRY) run mypy ./tests/ $(NAME) $(SYNC_NAME) --ignore-missing-imports
-       $(POETRY) run bandit -r $(NAME) -s B608
        $(POETRY) run bandit -r $(NAME) $(SYNC_NAME) -s B608
        $(POETRY) run twine check dist/*

.PHONY: format
-format: $(INSTALL_STAMP)
format: $(INSTALL_STAMP) sync
-       $(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME)
        $(POETRY) run isort --profile=black --lines-after-imports=2 ./tests/ $(NAME) $(SYNC_NAME)
-       $(POETRY) run black ./tests/ $(NAME)
        $(POETRY) run black ./tests/ $(NAME) $(SYNC_NAME)

.PHONY: test
-test: $(INSTALL_STAMP)
test: $(INSTALL_STAMP) sync
-       $(POETRY) run pytest -n auto -s -vv ./tests/ --cov-report term-missing --cov $(NAME)
        $(POETRY) run pytest -n auto -s -vv ./tests/ --cov-report term-missing --cov $(NAME) $(SYNC_NAME)

.PHONY: test_oss
test_oss: $(INSTALL_STAMP) sync
        # Specifically tests against a local OSS Redis instance via
        # docker-compose.yml. Do not use this for CI testing, where we should
        # instead have a matrix of Docker images.
        REDIS_OM_URL="redis://localhost:6381" $(POETRY) run pytest -n auto -s -vv ./tests/ --cov-report term-missing --cov $(NAME)

.PHONY: shell
shell: $(INSTALL_STAMP)

README.md

@@ -1,7 +1,14 @@
-<h1 align="center">Redis OM</h1>
<div align="center">
  <br/>
  <br/>
  <img width="360" src="images/logo.svg" alt="Redis OM" />
  <br/>
  <br/>
</div>

<p align="center">
    <p align="center">
-        Objecting mapping and more, for Redis.
        Object mapping, and more, for Redis and Python
    </p>
</p>

@@ -11,52 +18,243 @@
[![License][license-image]][license-url]
[![Build Status][ci-svg]][ci-url]

-Redis OM is a library that helps you build modern Python applications with Redis.
**Redis OM Python** makes it easy to model Redis data in your Python applications.

**Redis OM Python** | [Redis OM Node.js][redis-om-js] | [Redis OM Spring][redis-om-spring] | [Redis OM .NET][redis-om-dotnet]

<details>
  <summary><strong>Table of contents</strong></summary>
-<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
-- [Why Redis OM?](#why)
-- [Getting started](#getting-started)
-- [Installation](#installation)
-- [Documentation](#documentation)
-- [Troubleshooting](#troubleshooting)
-- [Contributing](#contributing)
-- [License](#license)
- [💡 Why Redis OM?](#-why-redis-om)
- [📇 Modeling Your Data](#-modeling-your-data)
- [✓ Validating Data With Your Model](#-validating-data-with-your-model)
- [🔎 Rich Queries and Embedded Models](#-rich-queries-and-embedded-models)
- [💻 Installation](#-installation)
- [📚 Documentation](#-documentation)
- [⛏️ Troubleshooting](#-troubleshooting)
- [✨ So, How Do You Get RediSearch and RedisJSON?](#-so-how-do-you-get-redisearch-and-redisjson)
- [❤️ Contributing](#-contributing)
- [📝 License](#-license)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
</details>

-## Why Redis OM?
## 💡 Why Redis OM?

-Redis OM is a library of high-level tools that help you build modern Python applications with Redis.
Redis OM provides high-level abstractions that make it easy to model and query data in Redis with modern Python applications.

-This *preview release* includes our first major component: a **declarative model class** backed by Redis.
This **preview** release contains the following features:

-## 🏁 Getting started
* Declarative object mapping for Redis objects
* Declarative secondary-index generation
* Fluent APIs for querying Redis

-### Object Mapping
## 📇 Modeling Your Data

-With Redis OM, you get powerful data modeling, validation, and query expressions with a small amount of code.
Redis OM contains powerful declarative models that give you data validation, serialization, and persistence to Redis.

-Check out this example:
Check out this example of modeling customer data with Redis OM. First, we create a `Customer` model:

```python
import datetime
from typing import Optional

-from redis_om.model import (
from pydantic import EmailStr
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: EmailStr
join_date: datetime.date
age: int
bio: Optional[str]
```
Now that we have a `Customer` model, let's use it to save customer data to Redis.
```python
import datetime
from typing import Optional
from pydantic import EmailStr
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: EmailStr
join_date: datetime.date
age: int
bio: Optional[str]
# First, we create a new `Customer` object:
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38,
bio="Python developer, works at Redis, Inc."
)
# The model generates a globally unique primary key automatically
# without needing to talk to Redis.
print(andrew.pk)
# > '01FJM6PH661HCNNRC884H6K30C'
# We can save the model to Redis by calling `save()`:
andrew.save()
# To retrieve this customer with its primary key, we use `Customer.get()`:
assert Customer.get(andrew.pk) == andrew
```
**Ready to learn more?** Check out the [getting started](docs/getting_started.md) guide.
Or, continue reading to see how Redis OM makes data validation a snap.
## ✓ Validating Data With Your Model
Redis OM uses [Pydantic][pydantic-url] to validate data based on the type annotations you assign to fields in a model class.
This validation ensures that fields like `first_name`, which the `Customer` model marked as a `str`, are always strings. **But every Redis OM model is also a Pydantic model**, so you can use Pydantic validators like `EmailStr`, `Pattern`, and many more for complex validations!
For example, because we used the `EmailStr` type for the `email` field, we'll get a validation error if we try to create a `Customer` with an invalid email address:
```python
import datetime
from typing import Optional
from pydantic import EmailStr, ValidationError
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: EmailStr
join_date: datetime.date
age: int
bio: Optional[str]
try:
Customer(
first_name="Andrew",
last_name="Brookins",
email="Not an email address!",
join_date=datetime.date.today(),
age=38,
bio="Python developer, works at Redis, Inc."
)
except ValidationError as e:
print(e)
"""
pydantic.error_wrappers.ValidationError: 1 validation error for Customer
email
value is not a valid email address (type=value_error.email)
"""
```
**Any existing Pydantic validator should work** as a drop-in type annotation with a Redis OM model. You can also write arbitrarily complex custom validations!
To learn more, see the [documentation on data validation](docs/validation.md).
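As a hedged sketch of what such a custom validation might look like, the example below adds a validator to the `Customer` model from earlier. The `age_must_be_reasonable` validator and its bounds are illustrative only; they are not part of Redis OM itself.

```python
import datetime
from typing import Optional

from pydantic import EmailStr, validator
from redis_om import HashModel


class Customer(HashModel):
    first_name: str
    last_name: str
    email: EmailStr
    join_date: datetime.date
    age: int
    bio: Optional[str]

    # A plain Pydantic validator: runs whenever a Customer is created.
    @validator("age")
    def age_must_be_reasonable(cls, v):
        if not 0 <= v <= 150:
            raise ValueError("age must be between 0 and 150")
        return v
```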
## 🔎 Rich Queries and Embedded Models
Data modeling, validation, and saving models to Redis all work regardless of how you run Redis.
Next, we'll show you the **rich query expressions** and **embedded models** Redis OM provides when the [RediSearch][redisearch-url] and [RedisJSON][redis-json-url] modules are installed in your Redis deployment, or you're using [Redis Enterprise][redis-enterprise-url].
**TIP**: *Wait, what's a Redis module?* If you aren't familiar with Redis modules, review the [So, How Do You Get RediSearch and RedisJSON?](#-so-how-do-you-get-redisearch-and-redisjson) section of this README.
### Querying
Redis OM comes with a rich query language that allows you to query Redis with Python expressions.
To show how this works, we'll make a small change to the `Customer` model we defined earlier. We'll add `Field(index=True)` to tell Redis OM that we want to index the `last_name` and `age` fields:
```python
import datetime
from typing import Optional
from pydantic import EmailStr
from redis_om import (
Field,
HashModel,
Migrator
)
from redis_om import get_redis_connection
class Customer(HashModel):
first_name: str
last_name: str = Field(index=True)
email: EmailStr
join_date: datetime.date
age: int = Field(index=True)
bio: Optional[str]
# Now, if we use this model with a Redis deployment that has the
# RediSearch module installed, we can run queries like the following.
# Before running queries, we need to run migrations to set up the
# indexes that Redis OM will use. You can also use the `migrate`
# CLI tool for this!
redis = get_redis_connection()
Migrator(redis).run()
# Find all customers with the last name "Brookins"
Customer.find(Customer.last_name == "Brookins").all()
# Find all customers that do NOT have the last name "Brookins"
Customer.find(Customer.last_name != "Brookins").all()
# Find all customers whose last name is "Brookins" OR whose age is
# 100 AND whose last name is "Smith"
Customer.find((Customer.last_name == "Brookins") | (
Customer.age == 100
) & (Customer.last_name == "Smith")).all()
```
These queries -- and more! -- are possible because **Redis OM manages indexes for you automatically**.
Querying with this index features a rich expression syntax inspired by the Django ORM, SQLAlchemy, and Peewee. We think you'll enjoy it!
To learn more about how to query with Redis OM, see the [documentation on querying](docs/querying.md).
### Embedded Models
Redis OM can store and query **nested models** like any document database, with the speed and power you get from Redis. Let's see how this works.
In the next example, we'll define a new `Address` model and embed it within the `Customer` model.
```python
import datetime
from typing import Optional
from redis_om import (
    EmbeddedJsonModel,
    JsonModel,
    Field,
    Migrator,
)
from redis_om import get_redis_connection


class Address(EmbeddedJsonModel):
    address_line_1: str
@@ -78,41 +276,23 @@ class Customer(JsonModel):
    # Creates an embedded model.
    address: Address

-```
-The example code defines `Address` and `Customer` models for use with a Redis database with the [RedisJSON](redis-json-url) module installed.
-
-With these two classes defined, you can now:
-
-* Validate data based on the model's type annotations using [Pydantic](pydantic-url)
-* Persist model instances to Redis as JSON
-* Instantiate model instances from Redis by primary key (a client-generated [ULID](ulid-url))
-* Query on any indexed fields in the models
-
-### Querying
-Querying uses a rich expression syntax inspired by the Django ORM, SQLAlchemy, and Peewee.
-
-Here are a few example queries that use the models we defined earlier:
-
-```python
-# Find all customers with the last name "Brookins"
-Customer.find(Customer.last_name == "Brookins").all()
-
-# Find all customers that do NOT have the last name "Brookins"
-Customer.find(Customer.last_name != "Brookins").all()
-
-# Find all customers whose last name is "Brookins" OR whose age is
-# 100 AND whose last name is "Smith"
-Customer.find((Customer.last_name == "Brookins") | (
-Customer.age == 100
-) & (Customer.last_name == "Smith")).all()

# With these two models and a Redis deployment with the RedisJSON
# module installed, we can run queries like the following.

# Before running queries, we need to run migrations to set up the
# indexes that Redis OM will use. You can also use the `migrate`
# CLI tool for this!
redis = get_redis_connection()
Migrator(redis).run()

# Find all customers who live in San Antonio, TX
Customer.find(Customer.address.city == "San Antonio",
              Customer.address.state == "TX")
```

-Ready to learn more? Read the [getting started](docs/getting_started.md) guide or check out how to [add Redis OM to your FastAPI project](docs/integrating.md).
To learn more, read the [documentation on embedded models](docs/embedded.md).

## 💻 Installation
@@ -128,7 +308,7 @@ $ poetry add redis-om

## 📚 Documentation

-Documentation is available [here](docs/index.md).
The Redis OM documentation is available [here](docs/index.md).

## ⛏️ Troubleshooting

@@ -137,36 +317,13 @@ If you run into trouble or have any questions, we're here to help!

First, check the [FAQ](docs/faq.md). If you don't find the answer there,
hit us up on the [Redis Discord Server](http://discord.gg/redis).

-## ✨ RediSearch and RedisJSON
## ✨ So How Do You Get RediSearch and RedisJSON?

-Redis OM relies on core features from two source available Redis modules: **RediSearch** and **RedisJSON**.
Some advanced features of Redis OM rely on core features from two source available Redis modules: [RediSearch][redisearch-url] and [RedisJSON][redis-json-url].

-These modules are the "magic" behind the scenes:
You can run these modules in your self-hosted Redis deployment, or you can use [Redis Enterprise][redis-enterprise-url], which includes both modules.

-* RediSearch adds querying, indexing, and full-text search to Redis
-* RedisJSON adds the JSON data type to Redis
To learn more, read [our documentation](docs/redis_modules.md).

-### Why this is important
-
-Without RediSearch or RedisJSON installed, you can still use Redis OM to create declarative models backed by Redis.
-
-We'll store your model data in Redis as Hashes, and you can retrieve models using their primary keys. You'll also get all the validation features from Pydantic.
-
-So, what won't work without these modules?
-
-1. Without RedisJSON, you won't be able to nest models inside each other, like we did with the example model of a `Customer` model that has an `Address` embedded inside it.
-2. Without RediSearch, you won't be able to use our expressive queries to find models -- just primary keys.
-
-### So how do you get RediSearch and RedisJSON?
-
-You can use RediSearch and RedisJSON with your self-hosted Redis deployment. Just follow the instructions on installing the binary versions of the modules in their Quick Start Guides:
-
-- [RedisJSON Quick Start - Running Binaries](https://oss.redis.com/redisjson/#download-and-running-binaries)
-- [RediSearch Quick Start - Running Binaries](https://oss.redis.com/redisearch/Quick_Start/#download_and_running_binaries)
-
-**NOTE**: Both Quick Start Guides also have instructions on how to run these modules in Redis with Docker.
-
-Don't want to run Redis yourself? RediSearch and RedisJSON are also available on Redis Cloud. [Get started here.](https://redis.com/try-free/)

## ❤️ Contributing

@@ -176,9 +333,9 @@ We'd love your contributions!

You can also **contribute documentation** -- or just let us know if something needs more detail. [Open an issue on GitHub](https://github.com/redis-om/redis-om-python/issues/new) to get started.

-## License
## 📝 License

-Redis OM is [MIT licensed][license-url].
Redis OM uses the [BSD 3-Clause license][license-url].

<!-- Badges -->

@@ -188,7 +345,6 @@ Redis OM is [MIT licensed][license-url].
[ci-url]: https://github.com/redis-om/redis-om-python/actions/workflows/build.yml
[license-image]: http://img.shields.io/badge/license-MIT-green.svg?style=flat-square
[license-url]: LICENSE

<!-- Links -->
[redis-om-website]: https://developer.redis.com
@@ -199,4 +355,4 @@ Redis OM is [MIT licensed][license-url].
[redis-json-url]: https://oss.redis.com/redisjson/
[pydantic-url]: https://github.com/samuelcolvin/pydantic
[ulid-url]: https://github.com/ulid/spec
[redis-enterprise-url]: https://redis.com/try-free/

aredis_om/__init__.py (new file)

@@ -0,0 +1,15 @@
from .checks import has_redis_json, has_redisearch
from .connections import get_redis_connection
from .model.migrations.migrator import MigrationError, Migrator
from .model.model import (
EmbeddedJsonModel,
Field,
FindQuery,
HashModel,
JsonModel,
NotFoundError,
QueryNotSupportedError,
QuerySyntaxError,
RedisModel,
RedisModelError,
)

aredis_om/checks.py (new file)

@@ -0,0 +1,28 @@
from functools import lru_cache
from typing import List
from aredis_om.connections import get_redis_connection
@lru_cache(maxsize=None)
async def get_modules(conn) -> List[str]:
modules = await conn.execute_command("module", "list")
return [m[1] for m in modules]
@lru_cache(maxsize=None)
async def has_redis_json(conn=None):
if conn is None:
conn = get_redis_connection()
names = await get_modules(conn)
return b"ReJSON" in names or "ReJSON" in names
@lru_cache(maxsize=None)
async def has_redisearch(conn=None):
if conn is None:
conn = get_redis_connection()
if has_redis_json(conn):
return True
names = await get_modules(conn)
return b"search" in names or "search" in names


@@ -1,28 +1,22 @@
import os
-from typing import Union
-
-import dotenv

import aioredis
-import redis
import dotenv

-from redis_om.unasync_util import ASYNC_MODE

dotenv.load_dotenv()

URL = os.environ.get("REDIS_OM_URL", None)

-if ASYNC_MODE:
-    client = aioredis.Redis
-else:
-    client = redis.Redis


-def get_redis_connection(**kwargs) -> Union[aioredis.Redis, redis.Redis]:
def get_redis_connection(**kwargs) -> aioredis.Redis:
    # If someone passed in a 'url' parameter, or specified a REDIS_OM_URL
    # environment variable, we'll create the Redis client from the URL.
    url = kwargs.pop("url", URL)
    if url:
-        return client.from_url(url, **kwargs)
        return aioredis.Redis.from_url(url, **kwargs)

    # Decode from UTF-8 by default
    if "decode_responses" not in kwargs:
        kwargs["decode_responses"] = True
-    return client(**kwargs)
    return aioredis.Redis(**kwargs)
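A hedged usage sketch for the now async-only connection helper; the URL below is illustrative.

```python
import asyncio

from aredis_om import get_redis_connection


async def main():
    # With no url argument, the helper falls back to the REDIS_OM_URL
    # environment variable and then to the default aioredis settings.
    redis = get_redis_connection(url="redis://localhost:6379")
    print(await redis.ping())


asyncio.run(main())
```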


@@ -1 +1,2 @@
from .migrations.migrator import MigrationError, Migrator
from .model import EmbeddedJsonModel, Field, HashModel, JsonModel, RedisModel


@@ -1,10 +1,10 @@
import click

-from redis_om.model.migrations.migrator import Migrator
from aredis_om.model.migrations.migrator import Migrator


@click.command()
-@click.option("--module", default="redis_om")
@click.option("--module", default="aredis_om")
def migrate(module):
    migrator = Migrator(module)


@@ -2,12 +2,11 @@ import hashlib
import logging
from dataclasses import dataclass
from enum import Enum
-from typing import Optional, Union
from typing import List, Optional

-from redis import ResponseError, Redis
-from aioredis import ResponseError as AResponseError, Redis as ARedis
from aioredis import Redis, ResponseError

-from redis_om.model.model import model_registry
from aredis_om.model.model import model_registry

log = logging.getLogger(__name__)

@@ -42,10 +41,10 @@ def schema_hash_key(index_name):
    return f"{index_name}:hash"


-async def create_index(redis: Union[Redis, ARedis], index_name, schema, current_hash):
async def create_index(redis: Redis, index_name, schema, current_hash):
    try:
        await redis.execute_command(f"ft.info {index_name}")
-    except (ResponseError, AResponseError):
    except ResponseError:
        await redis.execute_command(f"ft.create {index_name} {schema}")
        await redis.set(schema_hash_key(index_name), current_hash)
    else:
@@ -64,7 +63,7 @@ class IndexMigration:
    schema: str
    hash: str
    action: MigrationAction
-    redis: Union[Redis, ARedis]
    redis: Redis
    previous_hash: Optional[str] = None

    async def run(self):
@@ -87,9 +86,9 @@ class IndexMigration:

class Migrator:
-    def __init__(self, redis: Union[Redis, ARedis], module=None):
    def __init__(self, redis: Redis, module=None):
        self.module = module
-        self.migrations = []
        self.migrations: List[IndexMigration] = []
        self.redis = redis

    async def run(self):
@@ -108,7 +107,7 @@ class Migrator:
            try:
                await self.redis.execute_command("ft.info", cls.Meta.index_name)
-            except (ResponseError, AResponseError):
            except ResponseError:
                self.migrations.append(
                    IndexMigration(
                        name,
@@ -116,12 +115,12 @@ class Migrator:
                        schema,
                        current_hash,
                        MigrationAction.CREATE,
-                        self.redis
                        self.redis,
                    )
                )
                continue

-            stored_hash = self.redis.get(hash_key)
            stored_hash = await self.redis.get(hash_key)
            schema_out_of_date = current_hash != stored_hash

            if schema_out_of_date:
@@ -134,7 +133,7 @@ class Migrator:
                        current_hash,
                        MigrationAction.DROP,
                        self.redis,
-                        stored_hash
                        stored_hash,
                    )
                )
                self.migrations.append(
@@ -145,7 +144,7 @@ class Migrator:
                        current_hash,
                        MigrationAction.CREATE,
                        self.redis,
-                        stored_hash
                        stored_hash,
                    )
                )


@@ -10,6 +10,7 @@ from functools import reduce
from typing import (
    AbstractSet,
    Any,
    AsyncGenerator,
    Callable,
    Dict,
    List,
@@ -28,21 +29,22 @@ from typing import (
)

import aioredis
-import redis
from aioredis.client import Pipeline
from pydantic import BaseModel, validator
from pydantic.fields import FieldInfo as PydanticFieldInfo
from pydantic.fields import ModelField, Undefined, UndefinedType
-from pydantic.main import ModelMetaclass
from pydantic.main import ModelMetaclass, validate_model
from pydantic.typing import NoArgAnyCallable
from pydantic.utils import Representation
-from redis.client import Pipeline
from ulid import ULID

-from redis_om.connections import get_redis_connection
from ..checks import has_redis_json, has_redisearch
from ..connections import get_redis_connection
from ..unasync_util import ASYNC_MODE
from .encoders import jsonable_encoder
from .render_tree import render_tree
from .token_escaper import TokenEscaper
-from ..unasync_util import ASYNC_MODE

model_registry = {}
_T = TypeVar("_T")
@@ -116,33 +118,52 @@ def is_supported_container_type(typ: Optional[type]) -> bool:

def validate_model_fields(model: Type["RedisModel"], field_values: Dict[str, Any]):
    for field_name in field_values.keys():
        if "__" in field_name:
            obj = model
            for sub_field in field_name.split("__"):
                if not hasattr(obj, sub_field):
                    raise QuerySyntaxError(
                        f"The update path {field_name} contains a field that does not "
                        f"exist on {model.__name__}. The field is: {sub_field}"
                    )
                obj = getattr(obj, sub_field)
            return

        if field_name not in model.__fields__:
            raise QuerySyntaxError(
                f"The field {field_name} does not exist on the model {model.__name__}"
            )


-class ExpressionProtocol(Protocol):
-    op: Operators
-    left: ExpressionOrModelField
-    right: ExpressionOrModelField
-
-    def __invert__(self) -> "Expression":
-        pass
-
-    def __and__(self, other: ExpressionOrModelField):
-        pass
-
-    def __or__(self, other: ExpressionOrModelField):
-        pass
-
-    @property
-    def name(self) -> str:
-        raise NotImplementedError
-
-    @property
-    def tree(self) -> str:
-        raise NotImplementedError
def decode_redis_value(
    obj: Union[List[bytes], Dict[bytes, bytes], bytes], encoding: str
) -> Union[List[str], Dict[str, str], str]:
    """Decode a binary-encoded Redis hash into the specified encoding."""
    if isinstance(obj, list):
        return [v.decode(encoding) for v in obj]
    if isinstance(obj, dict):
        return {
            key.decode(encoding): value.decode(encoding) for key, value in obj.items()
        }
    elif isinstance(obj, bytes):
        return obj.decode(encoding)


class PipelineError(Exception):
    """A Redis pipeline error."""


def verify_pipeline_response(
    response: List[Union[bytes, str]], expected_responses: int = 0
):
    # TODO: More generic pipeline verification here (what else is possible?),
    # plus hash and JSON-specific verifications in separate functions.
    actual_responses = len(response)
    if actual_responses != expected_responses:
        raise PipelineError(
            f"We expected {expected_responses}, but the Redis "
            f"pipeline returned {actual_responses} responses."
        )


@dataclasses.dataclass
@@ -318,6 +339,13 @@ class FindQuery:
        page_size: int = DEFAULT_PAGE_SIZE,
        sort_fields: Optional[List[str]] = None,
    ):
        if not has_redisearch(model.db()):
            raise RedisModelError(
                "Your Redis instance does not have either the RediSearch module "
                "or RedisJSON module installed. Querying requires that your Redis "
                "instance has one of these modules installed."
            )

        self.expressions = expressions
        self.model = model
        self.offset = offset
@@ -331,10 +359,10 @@ class FindQuery:
        self._expression = None
        self._query: Optional[str] = None
-        self._pagination: list[str] = []
        self._pagination: List[str] = []
-        self._model_cache: list[RedisModel] = []
        self._model_cache: List[RedisModel] = []

-    def dict(self) -> dict[str, Any]:
    def dict(self) -> Dict[str, Any]:
        return dict(
            model=self.model,
            offset=self.offset,
@@ -757,7 +785,7 @@ class FindQuery:
        if pipeline:
            # TODO: Response type?
            # TODO: Better error detection for transactions.
-            pipeline.execute()
            await pipeline.execute()

    async def delete(self):
        """Delete all matching records in this query."""
@@ -787,8 +815,10 @@ class FindQuery:
        give it a new offset and limit: offset=n, limit=1.
        """
        if ASYNC_MODE:
-            raise QuerySyntaxError("Cannot use [] notation with async code. "
-                                   "Use FindQuery.get_item() instead.")
            raise QuerySyntaxError(
                "Cannot use [] notation with async code. "
                "Use FindQuery.get_item() instead."
            )

        if self._model_cache and len(self._model_cache) >= item:
            return self._model_cache[item]
@@ -821,7 +851,7 @@ class FindQuery:
        return result[0]


-class PrimaryKeyCreator(Protocol):
class PrimaryKeyCreator(abc.ABC):
    def create_pk(self, *args, **kwargs) -> str:
        """Create a new primary key"""
@@ -938,7 +968,7 @@ class PrimaryKey:
    field: ModelField


-class MetaProtocol(Protocol):
class BaseMeta(abc.ABC):
    global_key_prefix: str
    model_key_prefix: str
    primary_key_pattern: str
@@ -948,6 +978,7 @@ class MetaProtocol(Protocol):
    index_name: str
    abstract: bool
    embedded: bool
    encoding: str


@dataclasses.dataclass
@@ -961,16 +992,17 @@ class DefaultMeta:
    global_key_prefix: Optional[str] = None
    model_key_prefix: Optional[str] = None
    primary_key_pattern: Optional[str] = None
-    database: Optional[Union[redis.Redis, aioredis.Redis]] = None
    database: Optional[aioredis.Redis] = None
    primary_key: Optional[PrimaryKey] = None
    primary_key_creator_cls: Optional[Type[PrimaryKeyCreator]] = None
    index_name: Optional[str] = None
    abstract: Optional[bool] = False
    embedded: Optional[bool] = False
    encoding: str = "utf-8"


class ModelMeta(ModelMetaclass):
-    _meta: MetaProtocol
    _meta: BaseMeta

    def __new__(cls, name, bases, attrs, **kwargs):  # noqa C901
        meta = attrs.pop("Meta", None)
@@ -1036,10 +1068,13 @@ class ModelMeta(ModelMetaclass):
            new_class._meta.database = getattr(
                base_meta, "database", get_redis_connection()
            )
        if not getattr(new_class._meta, "encoding", None):
            new_class._meta.encoding = getattr(base_meta, "encoding")
        if not getattr(new_class._meta, "primary_key_creator_cls", None):
            new_class._meta.primary_key_creator_cls = getattr(
                base_meta, "primary_key_creator_cls", UlidPrimaryKey
            )
        # TODO: Configurable key separate, defaults to ":"
        if not getattr(new_class._meta, "index_name", None):
            new_class._meta.index_name = (
                f"{new_class._meta.global_key_prefix}:"
@@ -1082,7 +1117,7 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
        return await self.db().delete(self.key())

    @classmethod
-    async def get(cls, pk: Any) -> 'RedisModel':
    async def get(cls, pk: Any) -> "RedisModel":
        raise NotImplementedError

    async def update(self, **field_values):
@@ -1092,7 +1127,7 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
    async def save(self, pipeline: Optional[Pipeline] = None) -> "RedisModel":
        raise NotImplementedError

-    @validator("pk", always=True)
    @validator("pk", always=True, allow_reuse=True)
    def validate_pk(cls, v):
        if not v:
            v = cls._meta.primary_key_creator_cls().create_pk()
@@ -1191,19 +1226,45 @@ class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
        return d

    @classmethod
-    async def add(cls, models: Sequence["RedisModel"]) -> Sequence["RedisModel"]:
-        # TODO: Add transaction support
-        return [await model.save() for model in models]
    async def add(
        cls,
        models: Sequence["RedisModel"],
        pipeline: Optional[Pipeline] = None,
        pipeline_verifier: Callable[..., Any] = verify_pipeline_response,
    ) -> Sequence["RedisModel"]:
        if pipeline is None:
            # By default, send commands in a pipeline. Saving each model will
            # be atomic, but Redis may process other commands in between
            # these saves.
            db = cls.db().pipeline(transaction=False)
        else:
            # If the user gave us a pipeline, add our commands to that. The user
            # will be responsible for executing the pipeline after they've accumulated
            # the commands they want to send.
            db = pipeline

        for model in models:
            # save() just returns the model, we don't need that here.
            await model.save(pipeline=db)

        # If the user didn't give us a pipeline, then we need to execute
        # the one we just created.
        if pipeline is None:
            result = await db.execute()
            pipeline_verifier(result, expected_responses=len(models))

        return models

-    @classmethod
-    def values(cls):
-        """Return raw values from Redis instead of model instances."""
-        raise NotImplementedError

    @classmethod
    def redisearch_schema(cls):
        raise NotImplementedError

    def check(self):
        """Run all validations."""
        *_, validation_error = validate_model(self.__class__, self.__dict__)
        if validation_error:
            raise validation_error


class HashModel(RedisModel, abc.ABC):
    def __init_subclass__(cls, **kwargs):
@@ -1223,6 +1284,7 @@ class HashModel(RedisModel, abc.ABC):
        )

    async def save(self, pipeline: Optional[Pipeline] = None) -> "HashModel":
        self.check()
        if pipeline is None:
            db = self.db()
        else:
@@ -1232,12 +1294,39 @@ class HashModel(RedisModel, abc.ABC):
        await db.hset(self.key(), mapping=document)
        return self
@classmethod
async def all_pks(cls) -> AsyncGenerator[str, None]: # type: ignore
key_prefix = cls.make_key(cls._meta.primary_key_pattern.format(pk=""))
# TODO: We assume the key ends with the default separator, ":" -- when
# we make the separator configurable, we need to update this as well.
# ... And probably lots of other places ...
#
# TODO: Also, we need to decide how we want to handle the lack of
# decode_responses=True...
return (
key.split(":")[-1]
if isinstance(key, str)
else key.decode(cls.Meta.encoding).split(":")[-1]
async for key in cls.db().scan_iter(f"{key_prefix}*", _type="HASH")
)
    @classmethod
    async def get(cls, pk: Any) -> "HashModel":
-        document = cls.db().hgetall(cls.make_primary_key(pk))
        document = await cls.db().hgetall(cls.make_primary_key(pk))
        if not document:
            raise NotFoundError
-        return cls.parse_obj(document)
        try:
            result = cls.parse_obj(document)
        except TypeError as e:
            log.warning(
                f'Could not parse Redis response. Error was: "{e}". Probably, the '
                "connection is not set to decode responses from bytes. "
                "Attempting to decode response using the encoding set on "
                f"model class ({cls.__class__}). Encoding: {cls.Meta.encoding}."
            )
            document = decode_redis_value(document, cls.Meta.encoding)
            result = cls.parse_obj(document)
        return result

    @classmethod
    @no_type_check
@@ -1260,6 +1349,12 @@ class HashModel(RedisModel, abc.ABC):
        schema_parts = [schema_prefix] + cls.schema_for_fields()
        return " ".join(schema_parts)

    async def update(self, **field_values):
        validate_model_fields(self.__class__, field_values)
        for field, value in field_values.items():
            setattr(self, field, value)
        await self.save()

    @classmethod
    def schema_for_fields(cls):
        schema_parts = []
@@ -1342,10 +1437,16 @@ class HashModel(RedisModel, abc.ABC):
class JsonModel(RedisModel, abc.ABC):
    def __init_subclass__(cls, **kwargs):
        if not has_redis_json(cls.db()):
            log.error(
                "Your Redis instance does not have the RedisJson module "
                "loaded. JsonModel depends on RedisJson."
            )
        # Generate the RediSearch schema once to validate fields.
        cls.redisearch_schema()

    async def save(self, pipeline: Optional[Pipeline] = None) -> "JsonModel":
        self.check()
        if pipeline is None:
            db = self.db()
        else:
@@ -1357,7 +1458,25 @@ class JsonModel(RedisModel, abc.ABC):
    async def update(self, **field_values):
        validate_model_fields(self.__class__, field_values)
        for field, value in field_values.items():
-            setattr(self, field, value)
            # Handle the simple update case first, e.g. city="Happy Valley"
            if "__" not in field:
                setattr(self, field, value)
                continue

            # Handle the nested update field name case, e.g. address__city="Happy Valley"
            obj = self
            parts = field.split("__")
            path_to_field = parts[:-1]
            target_field = parts[-1]

            # Get the final object in a nested update field name, e.g. for
            # the string address__city, we want to get self.address.city
            for sub_field in path_to_field:
                obj = getattr(obj, sub_field)

            # Set the target field (the last "part" of the nested update
            # field name) to the target value.
            setattr(obj, target_field, value)
        await self.save()

    @classmethod
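As a hedged illustration of the nested-update path handled above, the sketch below reuses the `Address`/`Customer` JsonModel pattern from the README; it assumes a Redis instance with the RedisJSON module loaded, and the field names are illustrative.

```python
import asyncio

from aredis_om import EmbeddedJsonModel, Field, JsonModel


class Address(EmbeddedJsonModel):
    city: str = Field(index=True)
    state: str = Field(index=True)


class Customer(JsonModel):
    first_name: str = Field(index=True)
    address: Address


async def main():
    customer = Customer(
        first_name="Andrew",
        address=Address(city="Portland", state="OR"),
    )
    await customer.save()

    # address__city targets customer.address.city, then saves the model.
    await customer.update(address__city="Happy Valley")


asyncio.run(main())
```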


@@ -1,7 +1,6 @@
-from collections import Sequence
-from typing import Any, Dict, List, Mapping, Union
from typing import List, Mapping

-from redis_om.model.model import Expression
from aredis_om.model.model import Expression


class LogicalOperatorForListOfExpressions(Expression):


@@ -2,6 +2,7 @@
import inspect
_original_next = next _original_next = next


@@ -1,10 +0,0 @@
-import unasync
-
-
-def build(setup_kwargs):
-    setup_kwargs.update(
-        {"cmdclass": {'build_py': unasync.cmdclass_build_py(rules=[
-            unasync.Rule("/aredis_om/", "/redis_om/"),
-            unasync.Rule("/aredis_om/tests/", "/redis_om/tests/", additional_replacements={"aredis_om": "redis_om"}),
-        ])}}
-    )


@@ -9,3 +9,11 @@ services:
      - "6380:6379"
    volumes:
      - ./data:/data

  oss_redis:
    image: "redis:latest"
    restart: always
    ports:
      - "6381:6379"
    volumes:
      - ./oss_data:/oss_data

docs/connections.md (new file)

@@ -0,0 +1,3 @@
# Managing Connections
WIP!

docs/embedded_models.md (new file)

@@ -0,0 +1,54 @@
# Embedded Models
**NOTE:** This documentation is a stub, using the same embedded JSON model example as the README.
Redis OM can store and query **nested models** like any document database, with the speed and power you get from Redis. Let's see how this works.
In the next example, we'll define a new `Address` model and embed it within the `Customer` model.
```python
import datetime
from typing import Optional
from redis_om import (
EmbeddedJsonModel,
JsonModel,
Field,
Migrator
)
class Address(EmbeddedJsonModel):
address_line_1: str
address_line_2: Optional[str]
city: str = Field(index=True)
state: str = Field(index=True)
country: str
postal_code: str = Field(index=True)
class Customer(JsonModel):
first_name: str = Field(index=True)
last_name: str = Field(index=True)
email: str = Field(index=True)
join_date: datetime.date
age: int = Field(index=True)
bio: Optional[str] = Field(index=True, full_text_search=True,
default="")
# Creates an embedded model.
address: Address
# With these two models and a Redis deployment with the RedisJSON
# module installed, we can run queries like the following.
# Before running queries, we need to run migrations to set up the
# indexes that Redis OM will use. You can also use the `migrate`
# CLI tool for this!
Migrator().run()
# Find all customers who live in San Antonio, TX
Customer.find(Customer.address.city == "San Antonio",
Customer.address.state == "TX")
```


@@ -0,0 +1,3 @@
# Frequently Asked Questions (FAQ)
WIP!

docs/fastapi_integration.md (new file)

@@ -0,0 +1,134 @@
# FastAPI Integration
## Introduction
This section includes a complete example showing how to integrate Redis OM with FastAPI.
Good news: Redis OM was **specifically designed to integrate with FastAPI**!
## Concepts
### Every Redis OM Model is also a Pydantic model
Every Redis OM model is also a Pydantic model, so you can define a model and then use the model class anywhere that FastAPI expects a Pydantic model.
This means a couple of things:
1. A Redis OM model can be used for request body validation
2. Redis OM models show up in the auto-generated API documentation
### Cache vs. Data
Redis works well as either a durable data store or a cache, but the optimal Redis configuration is often different between these two use cases.
You almost always want to use a Redis instance tuned for caching when you're caching and a separate Redis instance tuned for data durability for storing application state.
This example shows how to manage these two uses of Redis within the same application. The app uses a FastAPI caching framework and a dedicated caching instance of Redis for caching, and a separate Redis instance tuned for durability for Redis OM models.
## Example app code
This is a complete example that you can run as-is:
```python
import datetime
from typing import Optional
import aioredis
from fastapi import FastAPI, HTTPException
from starlette.requests import Request
from starlette.responses import Response
from fastapi_cache import FastAPICache
from fastapi_cache.backends.redis import RedisBackend
from fastapi_cache.decorator import cache
from pydantic import EmailStr
from redis_om import HashModel, NotFoundError
from redis_om import get_redis_connection
# This Redis instance is tuned for durability.
REDIS_DATA_URL = "redis://localhost:6380"
# This Redis instance is tuned for cache performance.
REDIS_CACHE_URL = "redis://localhost:6381"
class Customer(HashModel):
first_name: str
last_name: str
email: EmailStr
join_date: datetime.date
age: int
bio: Optional[str]
app = FastAPI()
@app.post("/customer")
async def save_customer(customer: Customer):
# We can save the model to Redis by calling `save()`:
return customer.save()
@app.get("/customers")
async def list_customers(request: Request, response: Response):
# To retrieve this customer with its primary key, we use `Customer.get()`:
return {"customers": Customer.all_pks()}
@app.get("/customer/{pk}")
@cache(expire=10)
async def get_customer(pk: str, request: Request, response: Response):
# To retrieve this customer with its primary key, we use `Customer.get()`:
try:
return Customer.get(pk)
except NotFoundError:
raise HTTPException(status_code=404, detail="Customer not found")
@app.on_event("startup")
async def startup():
r = aioredis.from_url(REDIS_CACHE_URL, encoding="utf8",
decode_responses=True)
FastAPICache.init(RedisBackend(r), prefix="fastapi-cache")
# You can set the Redis OM URL using the REDIS_OM_URL environment
# variable, or by manually creating the connection using your model's
# Meta object.
Customer.Meta.database = get_redis_connection(url=REDIS_DATA_URL,
decode_responses=True)
```
## Testing the app
First, install the app's dependencies. This app uses Poetry, so make sure you have Poetry installed:
$ pip install poetry
Then install the dependencies:
$ poetry install
Next, start the server (this assumes you saved the example code above as `main.py`):
$ poetry run uvicorn --reload main:app
Then, in another shell, create a customer:
$ curl -X POST "http://localhost:8000/customer" -H 'Content-Type: application/json' -d '{"first_name":"Andrew","last_name":"Brookins","email":"a@example.com","age":"38","join_date":"2020-01-02"}'
{"pk":"01FM2G8EP38AVMH7PMTAJ123TA","first_name":"Andrew","last_name":"Brookins","email":"a@example.com","join_date":"2020-01-02","age":38,"bio":""}
Get a copy of the value for "pk" and make another request to get that customer:
$ curl "http://localhost:8000/customer/01FM2G8EP38AVMH7PMTAJ123TA"
{"pk":"01FM2G8EP38AVMH7PMTAJ123TA","first_name":"Andrew","last_name":"Brookins","email":"a@example.com","join_date":"2020-01-02","age":38,"bio":""}
You can also get a list of all customer PKs:
$ curl "http://localhost:8000/customers"
{"customers":["01FM2G8EP38AVMH7PMTAJ123TA"]}

View file

@ -0,0 +1,716 @@
# Getting Started With Redis OM
## Introduction
This tutorial will walk you through installing Redis OM, creating your first model, and using it to save and validate data.
## Prerequisites
Redis OM requires Python version 3.9 or above and a Redis instance to connect to.
## Python
Make sure you are running **Python version 3.9 or higher**:
```
python --version
Python 3.9.0
```
If you don't have Python installed, you can download it from [Python.org](https://www.python.org/downloads/), use [Pyenv](https://github.com/pyenv/pyenv), or install Python with your operating system's package manager.
## Redis
Redis OM saves data in Redis, so you will need Redis installed and running to complete this tutorial.
### Downloading Redis
The latest version of Redis is available from [Redis.io](https://redis.io/). You can also install Redis with your operating system's package manager.
**NOTE:** This tutorial will guide you through starting Redis locally, but the instructions will also work if Redis is running on a remote server.
### Installing Redis On Windows
Redis doesn't run directly on Windows, but you can use Windows Subsystem for Linux (WSL) to run Redis. See [our video on YouTube](https://youtu.be/_nFwPTHOMIY) for a walk-through.
Windows users can also use Docker. See the next section on running Redis with Docker for more information.
### Using Redis With Docker
Instead of installing Redis manually or with a package manager, you can run Redis with Docker.
We recommend the [redismod](https://hub.docker.com/r/redislabs/redismod) image because it includes Redis modules that Redis OM can use to give you extra features. Later sections of this guide will provide more detail about these features.
You can also use the official Redis Docker image, which is hosted on [Docker Hub](https://hub.docker.com/_/redis).
**NOTE**: We'll talk about how to actually start Redis with Docker when we discuss _running_ Redis later in this guide.
## Recommended: RediSearch and RedisJSON
Redis OM relies on the [RediSearch][redisearch-url] and [RedisJSON][redis-json-url] Redis modules to support [rich queries](querying.md) and [embedded models](embedded_models.md).
You don't need these Redis modules to use Redis OM's data modeling, validation, and persistence features, but we recommend them to get the most out of Redis OM.
The easiest way to run these Redis modules during local development is to use the [redismod](https://hub.docker.com/r/redislabs/redismod) Docker image.
For other installation methods, follow the "Quick Start" guides on both modules' home pages.
## Starting Redis
Before you get started with Redis OM, make sure you start Redis.
The command to start Redis will depend on how you installed it.
### Ubuntu Linux (Including WSL)
If you installed Redis using `apt`, start it with the `systemctl` command:
$ sudo systemctl restart redis.service
Otherwise, you can start the server manually:
$ redis-server
### macOS with Homebrew
$ brew services start redis
### Docker
The command to start Redis with Docker depends on the image you've chosen to use.
**TIP:** The `-d` option in these examples runs Redis in the background, while `-p 6379:6379` makes Redis reachable at port 6379 on your localhost.
#### Docker with the `redismod` image (recommended)
$ docker run -d -p 6379:6379 redislabs/redismod
### Docker with the `redis` image
$ docker run -d -p 6379:6379 redis
## Installing Redis OM
The recommended way to install Redis OM is with [Poetry](https://python-poetry.org/docs/). You can install Redis OM using Poetry with the following command:
$ poetry add redis-om
If you're using Pipenv, the command is:
$ pipenv install redis-om
Finally, you can install Redis OM with `pip` by running the following command:
$ pip install redis-om
**TIP:** If you aren't using Poetry or Pipenv and are instead installing directly with `pip`, we recommend that you install Redis OM in a virtual environment (AKA, a virtualenv). If you aren't familiar with this concept, see [Dan Bader's video and transcript](https://realpython.com/lessons/creating-virtual-environment/).
## Setting the Redis URL Environment Variable
We're almost ready to create a Redis OM model! But first, we need to make sure that Redis OM knows how to connect to Redis.
By default, Redis OM tries to connect to Redis on your localhost at port 6379. Most local install methods will result in Redis running at this location, in which case you don't need to do anything special.
However, if you configured Redis to run on a different port, or if you're using a remote Redis server, you'll need to set the `REDIS_OM_URL` environment variable.
The `REDIS_OM_URL` environment variable follows the redis-py URL format:
redis://[[username]:[password]]@localhost:6379/[database number]
The default connection is equivalent to the following `REDIS_OM_URL` environment variable:
redis://@localhost:6379
**TIP:** Redis databases are numbered, and the default is 0. You can leave off the database number to use the default database.
Other supported prefixes include "rediss" for SSL connections and "unix" for Unix domain sockets:
rediss://[[username]:[password]]@localhost:6379/0
unix://[[username]:[password]]@/path/to/socket.sock?db=0
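For example, to point Redis OM at a password-protected Redis server on a non-default port (hypothetical host, port, and password shown only for illustration), you might set:
$ export REDIS_OM_URL=redis://:your-password@your-redis-host:6380/0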
For more details about how to connect to Redis with Redis OM, see the [connections documentation](connections.md).
### Redis Cluster Support
Redis OM supports connecting to Redis Cluster, but this preview release does not support doing so with the `REDIS_OM_URL` environment variable. However, you can connect by manually creating a connection object.
See the [connections documentation](connections.md) for examples of connecting to Redis Cluster.
Support for connecting to Redis Cluster via `REDIS_OM_URL` will be added in a future release.
## Defining a Model
In this tutorial, we'll create a `Customer` model that validates and saves data. Let's start with a basic definition of the model. We'll add features as we go along.
```python
import datetime
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: str
```
There are a few details to note:
1. Our `Customer` model extends the `HashModel` class. This means that it will be saved to Redis as a hash. The other model class that Redis OM provides is `JsonModel`, which we'll discuss later.
2. We've specified the model's fields using Python type annotations.
Let's dig into these two details a bit more.
### The HashModel Class
When you subclass `HashModel`, your subclass is both a Redis OM model, with methods for saving data to Redis, *and* a Pydantic model.
This means that you can use Pydantic field validations with your Redis OM models, which we'll cover later, when we talk about validation. But this also means you can use Redis OM models anywhere you would use a Pydantic model, like in your FastAPI applications. 🤯
### Type Annotations
The type annotations you add to your model fields are used for a few purposes:
* Validating data with Pydantic validators
* Serializing data to Redis
* Deserializing data from Redis
We'll see examples of these throughout the course of this tutorial.
An important detail about the `HashModel` class is that it does not support `list`, `set`, or mapping (like `dict`) types. This is because Redis hashes cannot contain lists, sets, or other hashes.
If you want to model fields with a list, set, or mapping type, or another model, you'll need to use the `JsonModel` class, which can support these types, as well as embedded models.
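For example, here's a minimal sketch of a `JsonModel` that stores a list. The field names are just illustrative, and it assumes a Redis deployment with the RedisJSON module available:
```python
import datetime
from typing import List, Optional

from redis_om import JsonModel


class Customer(JsonModel):
    first_name: str
    last_name: str
    join_date: datetime.date
    # JsonModel can store collection types that HashModel cannot.
    skills: List[str] = []
    nicknames: Optional[List[str]] = None
```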
## Creating Models
Let's see what creating a model object looks like:
```python
import datetime
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: str
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38,
bio="Python developer, works at Redis, Inc."
)
```
### Optional Fields
What would happen if we left out one of these fields, like `bio`?
```python
import datetime
from redis_om import HashModel
from pydantic import ValidationError
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: str
# All fields are required because none of the fields
# are marked `Optional`, so we get a validation error:
try:
Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38 # <- We didn't pass in a bio!
)
except ValidationError as e:
print(e)
"""
ValidationError: 1 validation error for Customer
bio
field required (type=value_error.missing)
"""
```
If we want the `bio` field to be optional, we need to change the type annotation to use `Optional`.
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: Optional[str] # <- Now, bio is an Optional[str]
```
Now we can create `Customer` objects with or without the `bio` field.
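For example, something like this now passes validation, with `bio` defaulting to `None`:
```python
import datetime
from typing import Optional

from redis_om import HashModel


class Customer(HashModel):
    first_name: str
    last_name: str
    email: str
    join_date: datetime.date
    age: int
    bio: Optional[str]  # <- Optional, so it may be omitted


# No bio this time -- validation passes and bio defaults to None.
andrew = Customer(
    first_name="Andrew",
    last_name="Brookins",
    email="andrew.brookins@example.com",
    join_date=datetime.date.today(),
    age=38)

print(andrew.bio)
# > None
```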
### Default Values
Fields can have default values. You set them by assigning a value to a field.
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: Optional[str] = "Super dope" # <- We added a default here
```
Now, if we create a `Customer` object without a `bio` field, it will use the default value.
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: Optional[str] = "Super dope"
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38) # <- Notice, we didn't give a bio!
print(andrew.bio) # <- So we got the default value.
# > 'Super dope'
```
The model will then save this default value to Redis the next time you call `save()`.
### Automatic Primary Keys
Models generate a globally unique primary key automatically without needing to talk to Redis.
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: Optional[str] = "Super dope"
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38)
print(andrew.pk)
# > '01FJM6PH661HCNNRC884H6K30C'
```
The ID is available *before* you save the model.
The default ID generation function creates [ULIDs](https://github.com/ulid/spec), though you can change the function that generates the primary key for models if you'd like to use a different kind of primary key.
## Validating Data
Redis OM uses [Pydantic][pydantic-url] to validate data based on the type annotations you assign to fields in a model class.
This validation ensures that fields like `first_name`, which the `Customer` model marked as a `str`, are always strings. **But every Redis OM model is also a Pydantic model**, so you can use Pydantic validators like `EmailStr`, `Pattern`, and many more for complex validations!
For example, we defined the `join_date` for our `Customer` model earlier as a `datetime.date`. So, if we try to create a model with a `join_date` that isn't a date, we'll get a validation error.
Let's try it now:
```python
import datetime
from typing import Optional
from redis_om import HashModel
from pydantic import ValidationError
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: Optional[str] = "Super dope"
try:
Customer(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date="not a date!", # <- The problem line!
age=38
)
except ValidationError as e:
print(e)
"""
pydantic.error_wrappers.ValidationError: 1 validation error for Customer
join_date
invalid date format (type=value_error.date)
"""
```
### Models Coerce Values By Default
You might wonder what qualifies as a "date" in our last validation example. By default, Redis OM will try to coerce input values to the correct type. That means we can pass a date string for `join_date` instead of a `date` object:
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date="2020-01-02", # <- We're passing a YYYY-MM-DD date string now
age=38
)
print(andrew.join_date)
# > 2020-01-02
type(andrew.join_date)
# > datetime.date # The model parsed the string automatically!
```
This ability to combine parsing (in this case, a YYYY-MM-DD date string) with validation can save you a lot of work.
However, you can turn off coercion -- check the next section on using strict validation.
### Strict Validation
You can turn on strict validation to reject values for a field unless they match the exact type of the model's type annotations.
You do this by changing a field's type annotation to use one of the ["strict" types provided by Pydantic](https://pydantic-docs.helpmanual.io/usage/types/#strict-types).
Redis OM supports all of Pydantic's strict types: `StrictStr`, `StrictBytes`, `StrictInt`, `StrictFloat`, and `StrictBool`.
If we wanted to make sure that the `age` field only accepts integers and doesn't try to parse a string containing an integer, like "1", we'd use the `StrictInt` class.
```python
import datetime
from typing import Optional
from pydantic import StrictInt, ValidationError
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: StrictInt # <- Instead of int, we use StrictInt
bio: Optional[str]
# Now if we use a string instead of an integer for `age`,
# we get a validation error:
try:
Customer(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date="2020-01-02", # <- A date as a string shouldn't work now!
age="38"
)
except ValidationError as e:
print(e)
"""
pydantic.error_wrappers.ValidationError: 1 validation error for Customer
age
value is not a valid integer (type=type_error.integer)
"""
```
Pydantic doesn't include a `StrictDate` class, but we can create our own. In this example, we create a `StrictDate` type that we'll use to validate that `join_date` is a `datetime.date` object.
```python
import datetime
from typing import Optional
from pydantic import ValidationError
from redis_om import HashModel
class StrictDate(datetime.date):
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls, value: datetime.date, **kwargs) -> datetime.date:
if not isinstance(value, datetime.date):
raise ValueError("Value must be a datetime.date object")
return value
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: StrictDate
age: int
bio: Optional[str]
# Now if we use a string instead of a date object for `join_date`,
# we get a validation error:
try:
Customer(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date="2020-01-02", # <- A string shouldn't work now!
age="38"
)
except ValidationError as e:
print(e)
"""
pydantic.error_wrappers.ValidationError: 1 validation error for Customer
join_date
Value must be a datetime.date object (type=value_error)
"""
```
## Saving Models
We can save the model to Redis by calling `save()`:
```python
import datetime
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38)
andrew.save()
```
## Examining Your Data In Redis
You can view the data stored in Redis for any Redis OM model.
First, get the key of a model instance you want to inspect. The `key()` method will give you the exact Redis key used to store the model.
**NOTE:** The naming of this method may be confusing. This is not the primary key, but is instead the Redis key for this model. For this reason, the method name may change.
In this example, we're looking at the key created for the `Customer` model we've been building:
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: Optional[str] = "Super dope"
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38)
andrew.save()
andrew.key()
# > 'mymodel.Customer:01FKGX1DFEV9Z2XKF59WQ6DC9T'
```
With the model's Redis key, you can start `redis-cli` and inspect the data stored under that key. Here, we run the `HGETALL` command with `redis-cli`, using the running "redis" container that this project's Docker Compose file defines:
```
$ docker-compose exec -T redis redis-cli HGETALL mymodel.Customer:01FKGX1DFEV9Z2XKF59WQ6DC9T
1) "pk"
2) "01FKGX1DFEV9Z2XKF59WQ6DC9T"
3) "first_name"
4) "Andrew"
5) "last_name"
6) "Brookins"
7) "email"
8) "andrew.brookins@example.com"
9) "join_date"
10) "2021-11-02"
11) "age"
12) "38"
13) "bio"
14) "Super dope"
```
## Getting a Model
If you have the primary key of a model, you can call the `get()` method on the model class to get the model's data.
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
bio: Optional[str] = "Super dope"
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38)
andrew.save()
assert Customer.get(andrew.pk) == andrew
```
## Querying for Models With Expressions
Redis OM comes with a rich query language that allows you to query Redis with Python expressions.
To show how this works, we'll make a small change to the `Customer` model we defined earlier. We'll add `Field(index=True)` to tell Redis OM that we want to index the `last_name` and `age` fields:
```python
import datetime
from typing import Optional
from pydantic import EmailStr
from redis_om import (
Field,
HashModel,
Migrator
)
class Customer(HashModel):
first_name: str
last_name: str = Field(index=True)
email: EmailStr
join_date: datetime.date
age: int = Field(index=True)
bio: Optional[str]
# Now, if we use this model with a Redis deployment that has the
# RediSearch module installed, we can run queries like the following.
# Before running queries, we need to run migrations to set up the
# indexes that Redis OM will use. You can also use the `migrate`
# CLI tool for this!
Migrator().run()
# Find all customers with the last name "Brookins"
Customer.find(Customer.last_name == "Brookins").all()
# Find all customers that do NOT have the last name "Brookins"
Customer.find(Customer.last_name != "Brookins").all()
# Find all customers whose last name is "Brookins" OR whose age is
# 100 AND whose last name is "Smith"
Customer.find((Customer.last_name == "Brookins") | (
Customer.age == 100
) & (Customer.last_name == "Smith")).all()
```
Many more types of queries are possible. To learn more about querying with Redis OM, see the [documentation on querying](querying.md).
## Next Steps
Now that you know the basics of working with Redis OM, continue on for all the nitty-gritty details about [models and fields](models_and_fields.md).
<!-- Links -->
[redisearch-url]: https://oss.redis.com/redisearch/
[redis-json-url]: https://oss.redis.com/redisjson/

View file

@ -0,0 +1,3 @@
# Redis OM Documentation
WIP!

View file

@ -0,0 +1,3 @@
# Integration Redis OM With Popular Frameworks
WIP!

31
docs/models_and_fields.md Normal file
View file

@ -0,0 +1,31 @@
# Models and Fields
**NOTE:** This documentation is a stub. Documentation for this project is a work in progress!
## Introduction
## Saving Data As Hashes With HashModel
### What Does Redis Store?
## Saving Data With JSON With JsonModel
### What Does Redis Store?
## Primary Keys
### Why Primary Keys Matter to Redis OM
### Using the Default Primary Key
### Using a Custom Primary Key
## Meta Classes
## Subclassing Models
### Subclassing and Meta Objects
## Saving Models

67
docs/querying.md Normal file
View file

@ -0,0 +1,67 @@
# Querying
**NOTE:** This documentation is a stub that uses examples from other documentation in this project (the README, the Getting Started guide, etc.). Detailed documentation on querying is a work in progress.
Querying in Redis OM uses a rich expression syntax inspired by the Django ORM, SQLAlchemy, and Peewee.
In the following example, we define `Address` and `Customer` models for use with a Redis database that has the [RedisJSON](https://oss.redis.com/redisjson/) and [RediSearch](https://oss.redis.com/redisearch/) modules installed.
With these two classes defined, we can query on any indexed fields in the models -- including indexed fields within embedded models.
```python
import datetime
from typing import Optional
from redis_om import (
EmbeddedJsonModel,
JsonModel,
Field,
Migrator
)
class Address(EmbeddedJsonModel):
address_line_1: str
address_line_2: Optional[str]
city: str = Field(index=True)
state: str = Field(index=True)
country: str
postal_code: str = Field(index=True)
class Customer(JsonModel):
first_name: str = Field(index=True)
last_name: str = Field(index=True)
email: str = Field(index=True)
join_date: datetime.date
age: int = Field(index=True)
bio: Optional[str] = Field(index=True, full_text_search=True,
default="")
# Creates an embedded model.
address: Address
# Before running queries, we need to run migrations to set up the
# indexes that Redis OM will use. You can also use the `migrate`
# CLI tool for this!
Migrator().run()
# Here are a few example queries that use these two models...
# Find all customers with the last name "Brookins"
Customer.find(Customer.last_name == "Brookins").all()
# Find all customers that do NOT have the last name "Brookins"
Customer.find(Customer.last_name != "Brookins").all()
# Find all customers whose last name is "Brookins" OR whose age is
# 100 AND whose last name is "Smith"
Customer.find((Customer.last_name == "Brookins") | (
Customer.age == 100
) & (Customer.last_name == "Smith")).all()
# Find all customers who live in San Antonio, TX
Customer.find(Customer.address.city == "San Antonio",
Customer.address.state == "TX")
```

30
docs/redis_modules.md Normal file
View file

@ -0,0 +1,30 @@
# Redis Modules
Some advanced features of Redis OM, like rich query expressions and saving data as JSON, rely on core features from two source-available Redis modules: **RediSearch** and **RedisJSON**.
These modules are the "magic" behind the scenes:
* RediSearch adds querying, indexing, and full-text search to Redis
* RedisJSON adds the JSON data type to Redis
## Why this is important
Without RediSearch or RedisJSON installed, you can still use Redis OM to create declarative models backed by Redis.
We'll store your model data in Redis as Hashes, and you can retrieve models using their primary keys. You'll also get all the validation features from Pydantic.
So, what won't work without these modules?
1. Without RedisJSON, you won't be able to nest models inside each other, like we did in the earlier example of a `Customer` model with an `Address` model embedded inside it.
2. Without RediSearch, you won't be able to use our expressive queries to find models -- you'll only be able to retrieve them by primary key, as the sketch below shows.
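Here's a rough sketch of what still works without either module installed (the model and field names are only illustrative):
```python
import datetime

from redis_om import HashModel


class Customer(HashModel):
    first_name: str
    last_name: str
    join_date: datetime.date


andrew = Customer(first_name="Andrew", last_name="Brookins",
                  join_date=datetime.date.today())

# Persistence and primary-key lookups work with plain Redis...
andrew.save()
assert Customer.get(andrew.pk) == andrew

# ...but expressive queries like Customer.find(...) require RediSearch,
# and embedded models (JsonModel) require RedisJSON.
```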
## So how do you get RediSearch and RedisJSON?
You can use RediSearch and RedisJSON with your self-hosted Redis deployment. Just follow the instructions on installing the binary versions of the modules in their Quick Start Guides:
- [RedisJSON Quick Start - Running Binaries](https://oss.redis.com/redisjson/#download-and-running-binaries)
- [RediSearch Quick Start - Running Binaries](https://oss.redis.com/redisearch/Quick_Start/#download_and_running_binaries)
**NOTE**: Both of these modules' Quick Start Guides also have instructions on how to run the modules in Redis with Docker.
Don't want to run Redis yourself? RediSearch and RedisJSON are also available on Redis Cloud. [Get started here.](https://redis.com/try-free/)

5
docs/testing.md Normal file
View file

@ -0,0 +1,5 @@
# Testing Your Models
**NOTE:** This documentation is a Work in Progress.
Writing tests that use a Redis OM model requires some setup. For now, review the tests in the redis-om-python project for examples.
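As a rough sketch of the kind of setup involved -- assuming a Redis instance is reachable at the default URL or via `REDIS_OM_URL` -- a minimal pytest-style test might look like this:
```python
import datetime

import pytest

from redis_om import HashModel, get_redis_connection


class Customer(HashModel):
    first_name: str
    last_name: str
    join_date: datetime.date


@pytest.fixture
def redis():
    # A connection for tests; models use the connection configured
    # via REDIS_OM_URL (or the default localhost:6379).
    yield get_redis_connection()


def test_saves_and_gets_a_customer(redis):
    customer = Customer(first_name="Andrew", last_name="Brookins",
                        join_date=datetime.date.today())
    customer.save()
    assert Customer.get(customer.pk) == customer
```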

73
docs/validation.md Normal file
View file

@ -0,0 +1,73 @@
# Validation
Redis OM uses [Pydantic][pydantic-url] behind the scenes to validate data at runtime, based on the model's type annotations.
## Basic Type Validation
Validation works for basic type annotations like `str`. Thus, given the following model:
```python
import datetime
from typing import Optional

from pydantic import EmailStr
from redis_om import HashModel


class Customer(HashModel):
first_name: str
last_name: str
email: EmailStr
join_date: datetime.date
age: int
bio: Optional[str]
```
... Redis OM will ensure that `first_name` is always a string.
But every Redis OM model is also a Pydantic model, so you can use existing Pydantic validators like `EmailStr`, `Pattern`, and many more for complex validation!
## Complex Validation
Let's see what happens if we try to create a `Customer` object with an invalid email address.
```python
# We'll get a validation error if we try to use an invalid email address!
Customer(
first_name="Andrew",
last_name="Brookins",
email="Not an email address!",
join_date=datetime.date.today(),
age=38,
bio="Python developer, works at Redis, Inc."
)
```
This code generates the following error:
```
Traceback:
pydantic.error_wrappers.ValidationError: 1 validation error for Customer
email
value is not a valid email address (type=value_error.email)
```
We'll also get a validation error if we change a field on a model instance to an invalid value and then try to save it:
```python
andrew = Customer(
first_name="Andrew",
last_name="Brookins",
email="andrew.brookins@example.com",
join_date=datetime.date.today(),
age=38,
bio="Python developer, works at Redis, Inc."
)
andrew.email = "Not valid"
andrew.save()
```
Once again, we get a validation error:
```
Traceback:
pydantic.error_wrappers.ValidationError: 1 validation error for Customer
email
value is not a valid email address (type=value_error.email)
```
[pydantic-url]: https://github.com/samuelcolvin/pydantic

1
images/logo.svg Normal file
View file

@ -0,0 +1 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1024 256"><defs><style>.cls-1{fill:#6d7278;}</style></defs><path class="cls-1" d="M67.57,100.37A48.25,48.25,0,0,1,84.88,88.31a55.61,55.61,0,0,1,20.29-4.48,18.67,18.67,0,0,1,13.89,4.48,13.88,13.88,0,0,1,4.22,10.53,15,15,0,0,1-4.22,10.53,13.64,13.64,0,0,1-10.53,4.53c-17.34,1.53-41.4,18.81-41.4,42.29v53.29a15.48,15.48,0,0,1-15.06,15,13.64,13.64,0,0,1-10.75-4.64,14.84,14.84,0,0,1-4.5-10.53V98.2a14.84,14.84,0,0,1,4.5-10.53,20,20,0,0,1,11.31-3.78,15.34,15.34,0,0,1,15,15Z"/><path class="cls-1" d="M250.15,149.27a14.55,14.55,0,0,1-3.75,9.83,17,17,0,0,1-10.53,4.51H147a28.61,28.61,0,0,0,9.64,18.22,40.17,40.17,0,0,0,26.34,12.7,39.08,39.08,0,0,0,29.34-6.78,15.35,15.35,0,0,1,11.31-4.53,12.91,12.91,0,0,1,9.75,3.78,12.25,12.25,0,0,1,.75,17.31l0,0c-.22.25-.47.47-.72.72a68,68,0,0,1-48.87,18.06,63.87,63.87,0,0,1-48.82-21.06,74.15,74.15,0,0,1-20.29-49.74,67.59,67.59,0,0,1,20.29-49.68,62.8,62.8,0,0,1,47.4-20.31,61.13,61.13,0,0,1,46.51,18.84,68.82,68.82,0,0,1,21.15,46.51v1.48Zm-67-39.87a39.48,39.48,0,0,0-25.37,9.8,36.67,36.67,0,0,0-12,19.45h79a34.41,34.41,0,0,0-13.56-18.78,48.06,48.06,0,0,0-27.78-10.58"/><path class="cls-1" d="M391.72,208a15.47,15.47,0,0,1-15,15.06A16.24,16.24,0,0,1,361.63,211,64.54,64.54,0,0,1,323.2,223a65.45,65.45,0,0,1-48.15-20.31A67.05,67.05,0,0,1,255.46,153a71.53,71.53,0,0,1,19.59-49.65A66.27,66.27,0,0,1,323.2,82.22a62.07,62.07,0,0,1,37.59,12.06V41.68a13.7,13.7,0,0,1,4.53-10.53A15,15,0,0,1,376,26.62a13.68,13.68,0,0,1,10.53,4.53,14.87,14.87,0,0,1,4.53,10.53V208Zm-67.66-15.06a33.35,33.35,0,0,0,26.31-12,40,40,0,0,0,0-55.57,36.05,36.05,0,0,0-26.31-12.17,32.33,32.33,0,0,0-26.34,12.06,40,40,0,0,0,0,55.57,34.71,34.71,0,0,0,26.34,12"/><path class="cls-1" d="M436.2,41.68v6.06a15.29,15.29,0,0,1-4.64,11.45A12.9,12.9,0,0,1,421,62.94a13.52,13.52,0,0,1-10.5-4.47A15.12,15.12,0,0,1,406,47.19V41.63a15.49,15.49,0,0,1,15-15.06,13.9,13.9,0,0,1,10.53,4.58,13.58,13.58,0,0,1,4.61,10.53M410.81,87.61a14.69,14.69,0,0,1,10.53-4.53,13.49,13.49,0,0,1,10.53,4.53,15.24,15.24,0,0,1,4.3,11.12V208.78a12.65,12.65,0,0,1-4.61,10.54,14.81,14.81,0,0,1-10.5,4.52,13.59,13.59,0,0,1-10.53-4.52A14.69,14.69,0,0,1,406,208.78V98.84a15,15,0,0,1,4.53-11.12"/><path class="cls-1" d="M542.93,98.14a14.73,14.73,0,0,1,6,9,19.88,19.88,0,0,1-1.5,11.28,15,15,0,0,1-9,6,15.57,15.57,0,0,1-11.3-2.25c-10.54-6.75-18.76-10.51-25.57-10.51a24.33,24.33,0,0,0-16.67,5.23c-3,2.28-4.5,3.78-4.5,5.28a5.57,5.57,0,0,0,.75,3.78,10.5,10.5,0,0,0,3,2.25A45.1,45.1,0,0,0,504.47,135h0a105,105,0,0,1,27.06,8.33,38.91,38.91,0,0,1,7,67.57l-.24.15a60.58,60.58,0,0,1-36.12,11.28c-17.31,0-33.84-6.75-49.65-19.45a16,16,0,0,1-5.28-9.81,14.45,14.45,0,0,1,23.34-12.67,43.7,43.7,0,0,0,30.84,12,32.23,32.23,0,0,0,18.81-5.28c3-3,5.28-5.25,5.28-7.53s0-3-.75-3.75c0-.78-1.53-2.25-3.78-3a56,56,0,0,0-22.59-8h0a78.31,78.31,0,0,1-25.56-8.33,46,46,0,0,1-18.06-15.76,35.06,35.06,0,0,1-4.47-17.31,36.93,36.93,0,0,1,16.53-30.87,60.68,60.68,0,0,1,34.59-10.53c12.67.78,26.34,6,41.37,15.81"/><path class="cls-1" d="M582.16,155.49q0-35.07,19.54-54.61t54.41-19.53q35.75,0,55.09,19.19t19.34,53.78q0,25.1-8.45,41.17a60.15,60.15,0,0,1-24.42,25q-16,8.94-39.81,8.94-24.23,0-40.09-7.71T592,197.3Q582.16,180.59,582.16,155.49Zm44.25.19q0,21.69,8.06,31.16t21.93,9.48q14.26,0,22.08-9.28t7.81-33.31q0-20.22-8.16-29.55T656,114.85q-13.38,0-21.49,9.48T626.41,155.68Z"/><path class="cls-1" d="M749,32.13h86l33.19,118.56L901.18,32.13h86V227H933.6V78.38L892.31,227H843.8L802.59,78.38V227H749Z"/><rect class="cls-1" x="583.86" y="32.52" width="144.48" 
height="36.12"/></svg>


32
make_sync.py Normal file
View file

@ -0,0 +1,32 @@
import os
from pathlib import Path
import unasync
def main():
additional_replacements = {
"aredis_om": "redis_om",
"aioredis": "redis"
}
rules = [
unasync.Rule(
fromdir="/aredis_om/",
todir="/redis_om/",
additional_replacements=additional_replacements,
),
]
filepaths = []
for root, _, filenames in os.walk(
Path(__file__).absolute().parent / "aredis_om"
):
for filename in filenames:
if filename.rpartition(".")[-1] in ("py", "pyi",):
filepaths.append(os.path.join(root, filename))
unasync.unasync_files(filepaths, rules)
if __name__ == "__main__":
main()

1025
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,13 +1,28 @@
[tool.poetry] [tool.poetry]
name = "redis-om" name = "redis-om"
version = "0.1.0" version = "0.0.11"
description = "A high-level library containing useful Redis abstractions and tools, like an ORM and leaderboard." description = "A high-level library containing useful Redis abstractions and tools, like an ORM and leaderboard."
authors = ["Andrew Brookins <andrew.brookins@redislabs.com>"] authors = ["Andrew Brookins <andrew.brookins@redis.com>"]
license = "MIT" maintainers = ["Andrew Brookins <andrew.brookins@redis.com>"]
build = "build.py" license = "BSD-3-Clause"
readme = "README.md"
repository = "https://github.com/redis-developer/redis-om-python"
packages = [
{ "include" = "aredis_om" },
{ "include" = "redis_om" },
]
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Database :: Front-Ends",
]
include=[
"docs/*",
"images/*",
]
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^3.8" python = "^3.7"
redis = "^3.5.3" redis = "^3.5.3"
aioredis = "^2.0.0" aioredis = "^2.0.0"
pydantic = "^1.8.2" pydantic = "^1.8.2"
@ -33,9 +48,13 @@ pytest-cov = "^3.0.0"
pytest-xdist = "^2.4.0" pytest-xdist = "^2.4.0"
unasync = "^0.5.0" unasync = "^0.5.0"
pytest-asyncio = "^0.16.0" pytest-asyncio = "^0.16.0"
twine = "^3.4.2"
email-validator = "^1.1.3"
tox = "^3.24.4"
tox-pyenv = "^1.1.0"
[tool.poetry.scripts] [tool.poetry.scripts]
migrate = "redis_om.orm.cli.migrate:migrate" migrate = "redis_om.model.cli.migrate:migrate"
[build-system] [build-system]
requires = ["poetry-core>=1.0.0"] requires = ["poetry-core>=1.0.0"]

View file

@ -1,31 +0,0 @@
import abc
from typing import Optional
from redis_om.model.model import HashModel, JsonModel
class BaseJsonModel(JsonModel, abc.ABC):
class Meta:
global_key_prefix = "redis-om"
class BaseHashModel(HashModel, abc.ABC):
class Meta:
global_key_prefix = "redis-om"
# class AddressJson(BaseJsonModel):
# address_line_1: str
# address_line_2: Optional[str]
# city: str
# country: str
# postal_code: str
#
class AddressHash(BaseHashModel):
address_line_1: str
address_line_2: Optional[str]
city: str
country: str
postal_code: str

47
setup.py Normal file

File diff suppressed because one or more lines are too long

View file

@ -1,12 +1,24 @@
import asyncio
import random import random
import pytest import pytest
from redis_om.connections import get_redis_connection from aredis_om import get_redis_connection
@pytest.fixture @pytest.fixture(scope="session")
def redis(event_loop): def event_loop(request):
"""
Starlette needs a session-scoped event loop during test runs.
https://github.com/pytest-dev/pytest-asyncio/issues/169
"""
loop = asyncio.get_event_loop_policy().new_event_loop()
yield loop
loop.close()
@pytest.fixture(scope="session")
def redis():
yield get_redis_connection() yield get_redis_connection()

View file

@ -8,20 +8,24 @@ from unittest import mock
import pytest import pytest
from pydantic import ValidationError from pydantic import ValidationError
from redis_om.model import Field, HashModel from aredis_om import (
from redis_om.model.migrations.migrator import Migrator Field,
from redis_om.model.model import ( HashModel,
NotFoundError, Migrator,
QueryNotSupportedError, QueryNotSupportedError,
RedisModelError, RedisModelError,
has_redisearch,
) )
if not has_redisearch():
pytestmark = pytest.mark.skip
today = datetime.date.today() today = datetime.date.today()
@pytest.fixture @pytest.fixture
def m(key_prefix): async def m(key_prefix, redis):
class BaseHashModel(HashModel, abc.ABC): class BaseHashModel(HashModel, abc.ABC):
class Meta: class Meta:
global_key_prefix = key_prefix global_key_prefix = key_prefix
@ -42,7 +46,7 @@ def m(key_prefix):
model_key_prefix = "member" model_key_prefix = "member"
primary_key_pattern = "" primary_key_pattern = ""
Migrator().run() await Migrator(redis).run()
return namedtuple("Models", ["BaseHashModel", "Order", "Member"])( return namedtuple("Models", ["BaseHashModel", "Order", "Member"])(
BaseHashModel, Order, Member BaseHashModel, Order, Member
@ -50,7 +54,7 @@ def m(key_prefix):
@pytest.fixture @pytest.fixture
def members(m): async def members(m):
member1 = m.Member( member1 = m.Member(
first_name="Andrew", first_name="Andrew",
last_name="Brookins", last_name="Brookins",
@ -74,149 +78,32 @@ def members(m):
age=100, age=100,
join_date=today, join_date=today,
) )
member1.save() await member1.save()
member2.save() await member2.save()
member3.save() await member3.save()
yield member1, member2, member3 yield member1, member2, member3
def test_validates_required_fields(m): @pytest.mark.asyncio
# Raises ValidationError: last_name is required async def test_exact_match_queries(members, m):
with pytest.raises(ValidationError):
m.Member(first_name="Andrew", zipcode="97086", join_date=today)
def test_validates_field(m):
# Raises ValidationError: join_date is not a date
with pytest.raises(ValidationError):
m.Member(first_name="Andrew", last_name="Brookins", join_date="yesterday")
# Passes validation
def test_validation_passes(m):
member = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
age=38,
)
assert member.first_name == "Andrew"
def test_saves_model_and_creates_pk(m):
member = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
age=38,
)
# Save a model instance to Redis
member.save()
member2 = m.Member.get(member.pk)
assert member2 == member
def test_raises_error_with_embedded_models(m):
class Address(m.BaseHashModel):
address_line_1: str
address_line_2: Optional[str]
city: str
country: str
postal_code: str
with pytest.raises(RedisModelError):
class InvalidMember(m.BaseHashModel):
address: Address
@pytest.mark.skip("Not implemented yet")
def test_saves_many(m):
members = [
m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
),
m.Member(
first_name="Kim",
last_name="Brookins",
email="k@example.com",
join_date=today,
),
]
m.Member.add(members)
@pytest.mark.skip("Not ready yet")
def test_updates_a_model(members, m):
member1, member2, member3 = members member1, member2, member3 = members
# Or, with an implicit save: actual = await m.Member.find(m.Member.last_name == "Brookins").sort_by("age").all()
member1.update(last_name="Smith")
assert m.Member.find(m.Member.pk == member1.pk).first() == member1
# Or, affecting multiple model instances with an implicit save:
m.Member.find(m.Member.last_name == "Brookins").update(last_name="Smith")
results = m.Member.find(m.Member.last_name == "Smith")
assert results == members
def test_paginate_query(members, m):
member1, member2, member3 = members
actual = m.Member.find().sort_by("age").all(batch_size=1)
assert actual == [member2, member1, member3]
def test_access_result_by_index_cached(members, m):
member1, member2, member3 = members
query = m.Member.find().sort_by("age")
# Load the cache, throw away the result.
assert query._model_cache == []
query.execute()
assert query._model_cache == [member2, member1, member3]
# Access an item that should be in the cache.
with mock.patch.object(query.model, "db") as mock_db:
assert query[0] == member2
assert not mock_db.called
def test_access_result_by_index_not_cached(members, m):
member1, member2, member3 = members
query = m.Member.find().sort_by("age")
# Assert that we don't have any models in the cache yet -- we
# haven't made any requests of Redis.
assert query._model_cache == []
assert query[0] == member2
assert query[1] == member1
assert query[2] == member3
def test_exact_match_queries(members, m):
member1, member2, member3 = members
actual = m.Member.find(m.Member.last_name == "Brookins").sort_by("age").all()
assert actual == [member2, member1] assert actual == [member2, member1]
actual = m.Member.find( actual = await m.Member.find(
(m.Member.last_name == "Brookins") & ~(m.Member.first_name == "Andrew") (m.Member.last_name == "Brookins") & ~(m.Member.first_name == "Andrew")
).all() ).all()
assert actual == [member2] assert actual == [member2]
actual = m.Member.find(~(m.Member.last_name == "Brookins")).all() actual = await m.Member.find(~(m.Member.last_name == "Brookins")).all()
assert actual == [member3] assert actual == [member3]
actual = m.Member.find(m.Member.last_name != "Brookins").all() actual = await m.Member.find(m.Member.last_name != "Brookins").all()
assert actual == [member3] assert actual == [member3]
actual = ( actual = await (
m.Member.find( m.Member.find(
(m.Member.last_name == "Brookins") & (m.Member.first_name == "Andrew") (m.Member.last_name == "Brookins") & (m.Member.first_name == "Andrew")
| (m.Member.first_name == "Kim") | (m.Member.first_name == "Kim")
@ -226,16 +113,17 @@ def test_exact_match_queries(members, m):
) )
assert actual == [member2, member1] assert actual == [member2, member1]
actual = m.Member.find( actual = await m.Member.find(
m.Member.first_name == "Kim", m.Member.last_name == "Brookins" m.Member.first_name == "Kim", m.Member.last_name == "Brookins"
).all() ).all()
assert actual == [member2] assert actual == [member2]
def test_recursive_query_resolution(members, m): @pytest.mark.asyncio
async def test_recursive_query_resolution(members, m):
member1, member2, member3 = members member1, member2, member3 = members
actual = ( actual = await (
m.Member.find( m.Member.find(
(m.Member.last_name == "Brookins") (m.Member.last_name == "Brookins")
| (m.Member.age == 100) & (m.Member.last_name == "Smith") | (m.Member.age == 100) & (m.Member.last_name == "Smith")
@ -246,10 +134,11 @@ def test_recursive_query_resolution(members, m):
assert actual == [member2, member1, member3] assert actual == [member2, member1, member3]
def test_tag_queries_boolean_logic(members, m): @pytest.mark.asyncio
async def test_tag_queries_boolean_logic(members, m):
member1, member2, member3 = members member1, member2, member3 = members
actual = ( actual = await (
m.Member.find( m.Member.find(
(m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins") (m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins")
| (m.Member.last_name == "Smith") | (m.Member.last_name == "Smith")
@ -260,7 +149,8 @@ def test_tag_queries_boolean_logic(members, m):
assert actual == [member1, member3] assert actual == [member1, member3]
def test_tag_queries_punctuation(m): @pytest.mark.asyncio
async def test_tag_queries_punctuation(m):
member1 = m.Member( member1 = m.Member(
first_name="Andrew, the Michael", first_name="Andrew, the Michael",
last_name="St. Brookins-on-Pier", last_name="St. Brookins-on-Pier",
@ -268,7 +158,7 @@ def test_tag_queries_punctuation(m):
age=38, age=38,
join_date=today, join_date=today,
) )
member1.save() await member1.save()
member2 = m.Member( member2 = m.Member(
first_name="Bob", first_name="Bob",
@ -277,24 +167,26 @@ def test_tag_queries_punctuation(m):
age=38, age=38,
join_date=today, join_date=today,
) )
member2.save() await member2.save()
assert ( result = await (m.Member.find(m.Member.first_name == "Andrew, the Michael").first())
m.Member.find(m.Member.first_name == "Andrew, the Michael").first() == member1 assert result == member1
)
assert ( result = await (m.Member.find(m.Member.last_name == "St. Brookins-on-Pier").first())
m.Member.find(m.Member.last_name == "St. Brookins-on-Pier").first() == member1 assert result == member1
)
# Notice that when we index and query multiple values that use the internal # Notice that when we index and query multiple values that use the internal
# TAG separator for single-value exact-match fields, like an indexed string, # TAG separator for single-value exact-match fields, like an indexed string,
# the queries will succeed. We apply a workaround that queries for the union # the queries will succeed. We apply a workaround that queries for the union
# of the two values separated by the tag separator. # of the two values separated by the tag separator.
assert m.Member.find(m.Member.email == "a|b@example.com").all() == [member1] results = await m.Member.find(m.Member.email == "a|b@example.com").all()
assert m.Member.find(m.Member.email == "a|villain@example.com").all() == [member2] assert results == [member1]
results = await m.Member.find(m.Member.email == "a|villain@example.com").all()
assert results == [member2]
def test_tag_queries_negation(members, m): @pytest.mark.asyncio
async def test_tag_queries_negation(members, m):
member1, member2, member3 = members member1, member2, member3 = members
""" """
@ -304,7 +196,7 @@ def test_tag_queries_negation(members, m):
""" """
query = m.Member.find(~(m.Member.first_name == "Andrew")) query = m.Member.find(~(m.Member.first_name == "Andrew"))
assert query.all() == [member2] assert await query.all() == [member2]
""" """
first_name first_name
@ -319,7 +211,7 @@ def test_tag_queries_negation(members, m):
query = m.Member.find( query = m.Member.find(
~(m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins") ~(m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins")
) )
assert query.all() == [member2] assert await query.all() == [member2]
""" """
first_name first_name
@ -338,7 +230,7 @@ def test_tag_queries_negation(members, m):
~(m.Member.first_name == "Andrew") ~(m.Member.first_name == "Andrew")
& ((m.Member.last_name == "Brookins") | (m.Member.last_name == "Smith")) & ((m.Member.last_name == "Brookins") | (m.Member.last_name == "Smith"))
) )
assert query.all() == [member2] assert await query.all() == [member2]
""" """
first_name first_name
@ -357,64 +249,184 @@ def test_tag_queries_negation(members, m):
~(m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins") ~(m.Member.first_name == "Andrew") & (m.Member.last_name == "Brookins")
| (m.Member.last_name == "Smith") | (m.Member.last_name == "Smith")
) )
assert query.sort_by("age").all() == [member2, member3] assert await query.sort_by("age").all() == [member2, member3]
actual = m.Member.find( actual = await m.Member.find(
(m.Member.first_name == "Andrew") & ~(m.Member.last_name == "Brookins") (m.Member.first_name == "Andrew") & ~(m.Member.last_name == "Brookins")
).all() ).all()
assert actual == [member3] assert actual == [member3]
def test_numeric_queries(members, m): @pytest.mark.asyncio
async def test_numeric_queries(members, m):
member1, member2, member3 = members member1, member2, member3 = members
actual = m.Member.find(m.Member.age == 34).all() actual = await m.Member.find(m.Member.age == 34).all()
assert actual == [member2] assert actual == [member2]
actual = m.Member.find(m.Member.age > 34).sort_by("age").all() actual = await m.Member.find(m.Member.age > 34).sort_by("age").all()
assert actual == [member1, member3] assert actual == [member1, member3]
actual = m.Member.find(m.Member.age < 35).all() actual = await m.Member.find(m.Member.age < 35).all()
assert actual == [member2] assert actual == [member2]
actual = m.Member.find(m.Member.age <= 34).all() actual = await m.Member.find(m.Member.age <= 34).all()
assert actual == [member2] assert actual == [member2]
actual = m.Member.find(m.Member.age >= 100).all() actual = await m.Member.find(m.Member.age >= 100).all()
assert actual == [member3] assert actual == [member3]
actual = m.Member.find(m.Member.age != 34).sort_by("age").all() actual = await m.Member.find(m.Member.age != 34).sort_by("age").all()
assert actual == [member1, member3] assert actual == [member1, member3]
actual = m.Member.find(~(m.Member.age == 100)).sort_by("age").all() actual = await m.Member.find(~(m.Member.age == 100)).sort_by("age").all()
assert actual == [member2, member1] assert actual == [member2, member1]
actual = m.Member.find(m.Member.age > 30, m.Member.age < 40).sort_by("age").all() actual = (
await m.Member.find(m.Member.age > 30, m.Member.age < 40).sort_by("age").all()
)
assert actual == [member2, member1] assert actual == [member2, member1]
def test_sorting(members, m): @pytest.mark.asyncio
async def test_sorting(members, m):
member1, member2, member3 = members member1, member2, member3 = members
actual = m.Member.find(m.Member.age > 34).sort_by("age").all() actual = await m.Member.find(m.Member.age > 34).sort_by("age").all()
assert actual == [member1, member3] assert actual == [member1, member3]
actual = m.Member.find(m.Member.age > 34).sort_by("-age").all() actual = await m.Member.find(m.Member.age > 34).sort_by("-age").all()
assert actual == [member3, member1] assert actual == [member3, member1]
with pytest.raises(QueryNotSupportedError): with pytest.raises(QueryNotSupportedError):
# This field does not exist. # This field does not exist.
m.Member.find().sort_by("not-a-real-field").all() await m.Member.find().sort_by("not-a-real-field").all()
with pytest.raises(QueryNotSupportedError): with pytest.raises(QueryNotSupportedError):
# This field is not sortable. # This field is not sortable.
m.Member.find().sort_by("join_date").all() await m.Member.find().sort_by("join_date").all()
def test_not_found(m): def test_validates_required_fields(m):
with pytest.raises(NotFoundError): # Raises ValidationError: last_name is required
# This ID does not exist. # TODO: Test the error value
m.Member.get(1000) with pytest.raises(ValidationError):
m.Member(first_name="Andrew", zipcode="97086", join_date=today)
def test_validates_field(m):
# Raises ValidationError: join_date is not a date
# TODO: Test the error value
with pytest.raises(ValidationError):
m.Member(first_name="Andrew", last_name="Brookins", join_date="yesterday")
def test_validation_passes(m):
member = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
age=38,
)
assert member.first_name == "Andrew"
@pytest.mark.asyncio
async def test_saves_model_and_creates_pk(m):
member = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
age=38,
)
# Save a model instance to Redis
await member.save()
member2 = await m.Member.get(member.pk)
assert member2 == member
def test_raises_error_with_embedded_models(m):
class Address(m.BaseHashModel):
address_line_1: str
address_line_2: Optional[str]
city: str
country: str
postal_code: str
with pytest.raises(RedisModelError):
class InvalidMember(m.BaseHashModel):
address: Address
@pytest.mark.asyncio
async def test_saves_many(m):
member1 = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
age=38,
)
member2 = m.Member(
first_name="Kim",
last_name="Brookins",
email="k@example.com",
join_date=today,
age=34,
)
members = [member1, member2]
result = await m.Member.add(members)
assert result == [member1, member2]
assert await m.Member.get(pk=member1.pk) == member1
assert await m.Member.get(pk=member2.pk) == member2
@pytest.mark.asyncio
async def test_updates_a_model(members, m):
member1, member2, member3 = members
await member1.update(last_name="Smith")
member = await m.Member.get(member1.pk)
assert member.last_name == "Smith"
@pytest.mark.asyncio
async def test_paginate_query(members, m):
member1, member2, member3 = members
actual = await m.Member.find().sort_by("age").all(batch_size=1)
assert actual == [member2, member1, member3]
@pytest.mark.asyncio
async def test_access_result_by_index_cached(members, m):
member1, member2, member3 = members
query = m.Member.find().sort_by("age")
# Load the cache, throw away the result.
assert query._model_cache == []
await query.execute()
assert query._model_cache == [member2, member1, member3]
# Access an item that should be in the cache.
with mock.patch.object(query.model, "db") as mock_db:
assert await query.get_item(0) == member2
assert not mock_db.called
@pytest.mark.asyncio
async def test_access_result_by_index_not_cached(members, m):
member1, member2, member3 = members
query = m.Member.find().sort_by("age")
# Assert that we don't have any models in the cache yet -- we
# haven't made any requests of Redis.
assert query._model_cache == []
assert await query.get_item(0) == member2
assert await query.get_item(1) == member1
assert await query.get_item(2) == member3
def test_schema(m, key_prefix): def test_schema(m, key_prefix):

View file

@ -1,5 +1,4 @@
import abc import abc
import asyncio
import datetime import datetime
import decimal import decimal
from collections import namedtuple from collections import namedtuple
@ -9,15 +8,21 @@ from unittest import mock
import pytest import pytest
from pydantic import ValidationError from pydantic import ValidationError
from redis_om.model import EmbeddedJsonModel, Field, JsonModel from aredis_om import (
from redis_om.model.migrations.migrator import Migrator EmbeddedJsonModel,
from redis_om.model.model import ( Field,
JsonModel,
Migrator,
NotFoundError, NotFoundError,
QueryNotSupportedError, QueryNotSupportedError,
RedisModelError, RedisModelError,
has_redis_json,
) )
if not has_redis_json():
pytestmark = pytest.mark.skip
today = datetime.date.today() today = datetime.date.today()
@ -177,28 +182,66 @@ async def test_saves_model_and_creates_pk(address, m, redis):
assert member2.address == address assert member2.address == address
@pytest.mark.skip("Not implemented yet")
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_saves_many(address, m): async def test_saves_many_implicit_pipeline(address, m):
members = [ member1 = m.Member(
m.Member( first_name="Andrew",
first_name="Andrew", last_name="Brookins",
last_name="Brookins", email="a@example.com",
email="a@example.com", join_date=today,
join_date=today, address=address,
address=address, age=38,
age=38, )
), member2 = m.Member(
m.Member( first_name="Kim",
first_name="Kim", last_name="Brookins",
last_name="Brookins", email="k@example.com",
email="k@example.com", join_date=today,
join_date=today, address=address,
address=address, age=34,
age=34, )
), members = [member1, member2]
] result = await m.Member.add(members)
m.Member.add(members) assert result == [member1, member2]
assert await m.Member.get(pk=member1.pk) == member1
assert await m.Member.get(pk=member2.pk) == member2
@pytest.mark.asyncio
async def test_saves_many_explicit_transaction(address, m):
member1 = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
address=address,
age=38,
)
member2 = m.Member(
first_name="Kim",
last_name="Brookins",
email="k@example.com",
join_date=today,
address=address,
age=34,
)
members = [member1, member2]
result = await m.Member.add(members)
assert result == [member1, member2]
assert await m.Member.get(pk=member1.pk) == member1
assert await m.Member.get(pk=member2.pk) == member2
# Test the explicit pipeline path -- here, we add multiple Members
# using a single Redis transaction, with MULTI/EXEC.
async with m.Member.db().pipeline(transaction=True) as pipeline:
await m.Member.add(members, pipeline=pipeline)
assert result == [member1, member2]
assert await pipeline.execute() == ["OK", "OK"]
assert await m.Member.get(pk=member1.pk) == member1
assert await m.Member.get(pk=member2.pk) == member2
async def save(members): async def save(members):
@ -207,25 +250,19 @@ async def save(members):
return members return members
@pytest.mark.skip("Not ready yet")
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_updates_a_model(members, m): async def test_updates_a_model(members, m):
member1, member2, member3 = await save(members) member1, member2, member3 = await save(members)
# Or, with an implicit save: # Update a field directly on the model
member1.update(last_name="Smith") await member1.update(last_name="Apples to oranges")
assert m.Member.find(m.Member.pk == member1.pk).first() == member1 member = await m.Member.get(member1.pk)
assert member.last_name == "Apples to oranges"
# Or, affecting multiple model instances with an implicit save: # Update a field in an embedded model
m.Member.find(m.Member.last_name == "Brookins").update(last_name="Smith") await member2.update(address__city="Happy Valley")
results = m.Member.find(m.Member.last_name == "Smith") member = await m.Member.get(member2.pk)
assert results == members assert member.address.city == "Happy Valley"
# Or, updating a field in an embedded model:
member2.update(address__city="Happy Valley")
assert (
m.Member.find(m.Member.pk == member2.pk).first().address.city == "Happy Valley"
)
@pytest.mark.asyncio @pytest.mark.asyncio
@ -246,7 +283,7 @@ async def test_access_result_by_index_cached(members, m):
# Access an item that should be in the cache. # Access an item that should be in the cache.
with mock.patch.object(query.model, "db") as mock_db: with mock.patch.object(query.model, "db") as mock_db:
assert query[0] == member2 assert await query.get_item(0) == member2
assert not mock_db.called assert not mock_db.called
@ -258,9 +295,9 @@ async def test_access_result_by_index_not_cached(members, m):
# Assert that we don't have any models in the cache yet -- we # Assert that we don't have any models in the cache yet -- we
# haven't made any requests of Redis. # haven't made any requests of Redis.
assert query._model_cache == [] assert query._model_cache == []
assert query.get_item(0) == member2 assert await query.get_item(0) == member2
assert query.get_item(1) == member1 assert await query.get_item(1) == member1
assert query.get_item(2) == member3 assert await query.get_item(2) == member3
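Because __getitem__ cannot be awaited, the async query API swaps query[i] for an awaitable get_item(i), and the mocked-db assertion above relies on results being cached after the first fetch. A sketch of reading results by index under those assumptions (the helper name is illustrative):

async def first_two_by_age(Member):
    query = Member.find().sort_by("age")
    # The first call hits Redis and fills the query's model cache;
    # later indexes are served from that cache.
    return await query.get_item(0), await query.get_item(1)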
@pytest.mark.asyncio @pytest.mark.asyncio
@ -274,7 +311,6 @@ async def test_in_query(members, m):
assert actual == [member2, member1, member3] assert actual == [member2, member1, member3]
@pytest.mark.skip("Not implemented yet")
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_update_query(members, m): async def test_update_query(members, m):
member1, member2, member3 = members member1, member2, member3 = members
@ -286,8 +322,8 @@ async def test_update_query(members, m):
.sort_by("age") .sort_by("age")
.all() .all()
) )
assert actual == [member1, member2, member3] assert len(actual) == 3
assert all([m.name == "Bobby" for m in actual]) assert all([m.first_name == "Bobby" for m in actual])
@pytest.mark.asyncio @pytest.mark.asyncio
@ -323,7 +359,9 @@ async def test_exact_match_queries(members, m):
).all() ).all()
assert actual == [member2] assert actual == [member2]
actual = await m.Member.find(m.Member.address.city == "Portland").sort_by("age").all() actual = (
await m.Member.find(m.Member.address.city == "Portland").sort_by("age").all()
)
assert actual == [member2, member1, member3] assert actual == [member2, member1, member3]
@ -349,7 +387,9 @@ async def test_recursive_query_field_resolution(members, m):
description="Weird house", created_on=datetime.datetime.now() description="Weird house", created_on=datetime.datetime.now()
) )
await member1.save() await member1.save()
actual = await m.Member.find(m.Member.address.note.description == "Weird house").all() actual = await m.Member.find(
m.Member.address.note.description == "Weird house"
).all()
assert actual == [member1] assert actual == [member1]
member1.orders = [ member1.orders = [
@ -416,10 +456,12 @@ async def test_tag_queries_punctuation(address, m):
await member2.save() await member2.save()
assert ( assert (
await m.Member.find(m.Member.first_name == "Andrew, the Michael").first() == member1 await m.Member.find(m.Member.first_name == "Andrew, the Michael").first()
== member1
) )
assert ( assert (
await m.Member.find(m.Member.last_name == "St. Brookins-on-Pier").first() == member1 await m.Member.find(m.Member.last_name == "St. Brookins-on-Pier").first()
== member1
) )
# Notice that when we index and query multiple values that use the internal # Notice that when we index and query multiple values that use the internal
@ -427,7 +469,9 @@ async def test_tag_queries_punctuation(address, m):
# the queries will succeed. We apply a workaround that queries for the union # the queries will succeed. We apply a workaround that queries for the union
# of the two values separated by the tag separator. # of the two values separated by the tag separator.
assert await m.Member.find(m.Member.email == "a|b@example.com").all() == [member1] assert await m.Member.find(m.Member.email == "a|b@example.com").all() == [member1]
assert await m.Member.find(m.Member.email == "a|villain@example.com").all() == [member2] assert await m.Member.find(m.Member.email == "a|villain@example.com").all() == [
member2
]
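The workaround the comment describes stays invisible to callers: punctuation in TAG values is escaped at index time, and values containing the internal separator are matched by querying the union of their parts, so plain equality expressions keep working. A short sketch mirroring the assertions above (helper name illustrative):

async def punctuated_lookups(Member):
    # Exact match on a TAG value containing commas and periods.
    fancy = await Member.find(Member.first_name == "Andrew, the Michael").first()
    # Exact match on a value containing the internal separator ("|");
    # the union-of-parts workaround runs under the hood.
    with_separator = await Member.find(Member.email == "a|b@example.com").all()
    return fancy, with_separator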
@pytest.mark.asyncio @pytest.mark.asyncio
@ -509,7 +553,7 @@ async def test_numeric_queries(members, m):
actual = await m.Member.find(m.Member.age == 34).all() actual = await m.Member.find(m.Member.age == 34).all()
assert actual == [member2] assert actual == [member2]
actual = await m.Member.find(m.Member.age > 34).all() actual = await m.Member.find(m.Member.age > 34).sort_by("age").all()
assert actual == [member1, member3] assert actual == [member1, member3]
actual = await m.Member.find(m.Member.age < 35).all() actual = await m.Member.find(m.Member.age < 35).all()
@ -524,7 +568,9 @@ async def test_numeric_queries(members, m):
actual = await m.Member.find(~(m.Member.age == 100)).sort_by("age").all() actual = await m.Member.find(~(m.Member.age == 100)).sort_by("age").all()
assert actual == [member2, member1] assert actual == [member2, member1]
actual = await m.Member.find(m.Member.age > 30, m.Member.age < 40).sort_by("age").all() actual = (
await m.Member.find(m.Member.age > 30, m.Member.age < 40).sort_by("age").all()
)
assert actual == [member2, member1] assert actual == [member2, member1]
actual = await m.Member.find(m.Member.age != 34).sort_by("age").all() actual = await m.Member.find(m.Member.age != 34).sort_by("age").all()
View file
@ -0,0 +1,175 @@
import abc
import datetime
import decimal
from collections import namedtuple
from typing import Optional
import pytest
from pydantic import ValidationError
from aredis_om import HashModel, Migrator, NotFoundError, RedisModelError
today = datetime.date.today()
@pytest.fixture
async def m(key_prefix, redis):
class BaseHashModel(HashModel, abc.ABC):
class Meta:
global_key_prefix = key_prefix
class Order(BaseHashModel):
total: decimal.Decimal
currency: str
created_on: datetime.datetime
class Member(BaseHashModel):
first_name: str
last_name: str
email: str
join_date: datetime.date
age: int
class Meta:
model_key_prefix = "member"
primary_key_pattern = ""
await Migrator(redis).run()
return namedtuple("Models", ["BaseHashModel", "Order", "Member"])(
BaseHashModel, Order, Member
)
@pytest.fixture
async def members(m):
member1 = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
age=38,
join_date=today,
)
member2 = m.Member(
first_name="Kim",
last_name="Brookins",
email="k@example.com",
age=34,
join_date=today,
)
member3 = m.Member(
first_name="Andrew",
last_name="Smith",
email="as@example.com",
age=100,
join_date=today,
)
await member1.save()
await member2.save()
await member3.save()
yield member1, member2, member3
@pytest.mark.asyncio
async def test_all_keys(members, m):
pks = sorted([pk async for pk in await m.Member.all_pks()])
assert len(pks) == 3
assert pks == sorted([m.pk for m in members])
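In the async port, all_pks() yields primary keys through an async generator, which is why the test consumes it with async for. A sketch of scanning every key and loading each model under that assumption (load_all is illustrative):

async def load_all(Member):
    members = []
    # all_pks() scans the key space and yields each saved primary key.
    async for pk in await Member.all_pks():
        members.append(await Member.get(pk))
    return members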
@pytest.mark.asyncio
async def test_not_found(m):
with pytest.raises(NotFoundError):
# This ID does not exist.
await m.Member.get(1000)
def test_validates_required_fields(m):
    # Raises ValidationError: last_name, email, and age are required
# TODO: Test the error value
with pytest.raises(ValidationError):
m.Member(first_name="Andrew", zipcode="97086", join_date=today)
def test_validates_field(m):
# Raises ValidationError: join_date is not a date
# TODO: Test the error value
with pytest.raises(ValidationError):
m.Member(first_name="Andrew", last_name="Brookins", join_date="yesterday")
def test_validation_passes(m):
member = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
age=38,
)
assert member.first_name == "Andrew"
@pytest.mark.asyncio
async def test_saves_model_and_creates_pk(m):
member = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
age=38,
)
# Save a model instance to Redis
await member.save()
member2 = await m.Member.get(member.pk)
assert member2 == member
def test_raises_error_with_embedded_models(m):
class Address(m.BaseHashModel):
address_line_1: str
address_line_2: Optional[str]
city: str
country: str
postal_code: str
with pytest.raises(RedisModelError):
class InvalidMember(m.BaseHashModel):
address: Address
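The test above pins down a HashModel limitation: Redis hashes are flat, so nesting one model inside another raises RedisModelError. A hedged sketch of the JSON-based alternative, assuming this package exposes JsonModel and EmbeddedJsonModel like the upstream redis-om API and that the RedisJSON module is loaded:

from aredis_om import EmbeddedJsonModel, JsonModel


class Address(EmbeddedJsonModel):
    city: str
    postal_code: str


class MemberWithAddress(JsonModel):
    first_name: str
    # Nesting is allowed here because JSON documents are not flat hashes.
    address: Address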
@pytest.mark.asyncio
async def test_saves_many(m):
member1 = m.Member(
first_name="Andrew",
last_name="Brookins",
email="a@example.com",
join_date=today,
age=38,
)
member2 = m.Member(
first_name="Kim",
last_name="Brookins",
email="k@example.com",
join_date=today,
age=34,
)
members = [member1, member2]
result = await m.Member.add(members)
assert result == [member1, member2]
assert await m.Member.get(pk=member1.pk) == member1
assert await m.Member.get(pk=member2.pk) == member2
@pytest.mark.asyncio
async def test_updates_a_model(members, m):
member1, member2, member3 = members
await member1.update(last_name="Smith")
member = await m.Member.get(member1.pk)
assert member.last_name == "Smith"
View file
@ -0,0 +1,49 @@
import abc
import datetime
from collections import namedtuple
import pytest
from pydantic import EmailStr, ValidationError
from aredis_om import Field, HashModel, Migrator
today = datetime.date.today()
@pytest.fixture
async def m(key_prefix, redis):
class BaseHashModel(HashModel, abc.ABC):
class Meta:
global_key_prefix = key_prefix
class Member(BaseHashModel):
first_name: str
last_name: str
email: EmailStr = Field(index=True)
join_date: datetime.date
age: int
await Migrator(redis).run()
return namedtuple("Models", ["Member"])(Member)
def test_email_str(m):
with pytest.raises(ValidationError):
m.Member(
first_name="Andrew",
last_name="Brookins",
email="not an email!",
age=38,
join_date=today,
)
with pytest.raises(ValidationError):
m.Member(
first_name="Andrew",
last_name="Brookins",
email="andrew@bad-domain",
age=38,
join_date=today,
)
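For contrast with the two rejected values, an address that parses as syntactically valid passes pydantic's EmailStr check. A sketch of the accepting case, using the Member fixture above (illustrative only):

def test_email_str_accepts_valid_address(m):
    member = m.Member(
        first_name="Andrew",
        last_name="Brookins",
        email="andrew@example.com",  # well-formed address, so no ValidationError
        age=38,
        join_date=today,
    )
    assert member.email == "andrew@example.com"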
9
tox.ini Normal file
View file
@ -0,0 +1,9 @@
[tox]
skipsdist = true
envlist = py37, py38, py39, py310
[testenv]
whitelist_externals = poetry
commands =
poetry install -v
poetry run pytest