Final docs push

This commit is contained in:
Andrew Brookins 2021-11-24 18:12:27 -08:00
parent 321b356140
commit 269d44c26e
13 changed files with 442 additions and 48 deletions

View file

@ -113,7 +113,7 @@ andrew = Customer(
# The model generates a globally unique primary key automatically
# without needing to talk to Redis.
print(andrew.pk)
# > '01FJM6PH661HCNNRC884H6K30C'
# > "01FJM6PH661HCNNRC884H6K30C"
# We can save the model to Redis by calling `save()`:
andrew.save()
@ -199,7 +199,6 @@ from redis_om import (
HashModel,
Migrator
)
from redis_om import get_redis_connection
class Customer(HashModel):
@ -217,8 +216,7 @@ class Customer(HashModel):
# Before running queries, we need to run migrations to set up the
# indexes that Redis OM will use. You can also use the `migrate`
# CLI tool for this!
redis = get_redis_connection()
Migrator(redis).run()
Migrator().run()
# Find all customers with the last name "Brookins"
Customer.find(Customer.last_name == "Brookins").all()
@ -253,7 +251,6 @@ from redis_om import (
Field,
Migrator,
)
from redis_om import get_redis_connection
class Address(EmbeddedJsonModel):
@ -284,8 +281,7 @@ class Customer(JsonModel):
# Before running queries, we need to run migrations to set up the
# indexes that Redis OM will use. You can also use the `migrate`
# CLI tool for this!
redis = get_redis_connection()
Migrator(redis).run()
Migrator().run()
# Find all customers who live in San Antonio, TX
Customer.find(Customer.address.city == "San Antonio",

View file

@ -7,6 +7,7 @@ from aredis_om.model.migrations.migrator import Migrator
@click.option("--module", default="aredis_om")
def migrate(module):
migrator = Migrator(module)
migrator.detect_migrations()
if migrator.migrations:
print("Pending migrations:")

View file

@ -68,6 +68,15 @@ def jsonable_encoder(
include = set(include)
if exclude is not None and not isinstance(exclude, (set, dict)):
exclude = set(exclude)
if custom_encoder:
if type(obj) in custom_encoder:
return custom_encoder[type(obj)](obj)
else:
for encoder_type, encoder in custom_encoder.items():
if isinstance(obj, encoder_type):
return encoder(obj)
if isinstance(obj, BaseModel):
encoder = getattr(obj.__config__, "json_encoders", {})
if custom_encoder:
@ -145,13 +154,9 @@ def jsonable_encoder(
)
return encoded_list
if custom_encoder:
if type(obj) in custom_encoder:
return custom_encoder[type(obj)](obj)
else:
for encoder_type, encoder in custom_encoder.items():
if isinstance(obj, encoder_type):
return encoder(obj)
# This function originally called custom encoders here,
# which meant we couldn't override the encoder for many
# types hard-coded into this function (lists, etc.).
if type(obj) in ENCODERS_BY_TYPE:
return ENCODERS_BY_TYPE[type(obj)](obj)

View file

@ -84,12 +84,11 @@ class IndexMigration:
class Migrator:
def __init__(self, redis: Redis, module=None):
def __init__(self, module=None):
self.module = module
self.migrations: List[IndexMigration] = []
self.redis = redis
async def run(self):
async def detect_migrations(self):
# Try to load any modules found under the given path or module name.
if self.module:
import_submodules(self.module)
@ -100,6 +99,7 @@ class Migrator:
for name, cls in model_registry.items():
hash_key = schema_hash_key(cls.Meta.index_name)
redis = cls.db()
try:
schema = cls.redisearch_schema()
except NotImplementedError:
@ -108,7 +108,7 @@ class Migrator:
current_hash = hashlib.sha1(schema.encode("utf-8")).hexdigest() # nosec
try:
await self.redis.execute_command("ft.info", cls.Meta.index_name)
await redis.execute_command("ft.info", cls.Meta.index_name)
except ResponseError:
self.migrations.append(
IndexMigration(
@ -117,12 +117,12 @@ class Migrator:
schema,
current_hash,
MigrationAction.CREATE,
self.redis,
redis,
)
)
continue
stored_hash = await self.redis.get(hash_key)
stored_hash = await redis.get(hash_key)
schema_out_of_date = current_hash != stored_hash
if schema_out_of_date:
@ -134,7 +134,7 @@ class Migrator:
schema,
current_hash,
MigrationAction.DROP,
self.redis,
redis,
stored_hash,
)
)
@ -145,12 +145,14 @@ class Migrator:
schema,
current_hash,
MigrationAction.CREATE,
self.redis,
redis,
stored_hash,
)
)
async def run(self):
# TODO: Migration history
# TODO: Dry run with output
await self.detect_migrations()
for migration in self.migrations:
await migration.run()

View file

@ -107,6 +107,7 @@ def embedded(cls):
def is_supported_container_type(typ: Optional[type]) -> bool:
# TODO: Wait, why don't we support indexing sets?
if typ == list or typ == tuple:
return True
unwrapped = get_origin(typ)
@ -479,8 +480,7 @@ class FindQuery:
if isinstance(value, str):
return escaper.escape(value)
if isinstance(value, bytes):
# TODO: We don't decode and then escape bytes objects passed as input.
# Should we?
# TODO: We don't decode bytes objects passed as input. Should we?
# TODO: TAG indexes fail on JSON arrays of numbers -- only strings
# are allowed -- what happens if we save an array of bytes?
return value
@ -966,7 +966,7 @@ class PrimaryKey:
field: ModelField
class BaseMeta(abc.ABC):
class BaseMeta(Protocol):
global_key_prefix: str
model_key_prefix: str
primary_key_pattern: str
@ -974,7 +974,6 @@ class BaseMeta(abc.ABC):
primary_key: PrimaryKey
primary_key_creator_cls: Type[PrimaryKeyCreator]
index_name: str
abstract: bool
embedded: bool
encoding: str
@ -994,7 +993,6 @@ class DefaultMeta:
primary_key: Optional[PrimaryKey] = None
primary_key_creator_cls: Optional[Type[PrimaryKeyCreator]] = None
index_name: Optional[str] = None
abstract: Optional[bool] = False
embedded: Optional[bool] = False
encoding: str = "utf-8"
@ -1269,17 +1267,23 @@ class HashModel(RedisModel, abc.ABC):
super().__init_subclass__(**kwargs)
for name, field in cls.__fields__.items():
origin = get_origin(field.outer_type_)
if origin:
for typ in (Set, Mapping, List):
if issubclass(origin, typ):
raise RedisModelError(
f"HashModels cannot index set, list,"
f" or mapping fields. Field: {name}"
)
if issubclass(field.outer_type_, RedisModel):
raise RedisModelError(
f"HashModels cannot have embedded model " f"fields. Field: {name}"
f"HashModels cannot index embedded model fields. Field: {name}"
)
elif dataclasses.is_dataclass(field.outer_type_):
raise RedisModelError(
f"HashModels cannot index dataclass fields. Field: {name}"
)
for typ in (Set, Mapping, List):
if issubclass(field.outer_type_, typ):
raise RedisModelError(
f"HashModels cannot have set, list,"
f" or mapping fields. Field: {name}"
)
async def save(self, pipeline: Optional[Pipeline] = None) -> "HashModel":
self.check()
@ -1360,6 +1364,8 @@ class HashModel(RedisModel, abc.ABC):
for name, field in cls.__fields__.items():
# TODO: Merge this code with schema_for_type()?
_type = field.outer_type_
is_subscripted_type = get_origin(_type)
if getattr(field.field_info, "primary_key", None):
if issubclass(_type, str):
redisearch_field = (
@ -1372,7 +1378,12 @@ class HashModel(RedisModel, abc.ABC):
schema_parts.append(redisearch_field)
elif getattr(field.field_info, "index", None) is True:
schema_parts.append(cls.schema_for_type(name, _type, field.field_info))
elif is_supported_container_type(_type):
elif is_subscripted_type:
# Ignore subscripted types (usually containers!) that we don't
# support, for the purposes of indexing.
if not is_supported_container_type(_type):
continue
embedded_cls = get_args(_type)
if not embedded_cls:
# TODO: Test if this can really happen.

View file

@ -658,7 +658,6 @@ from pydantic import EmailStr
from redis_om import (
Field,
get_redis_connection,
HashModel,
Migrator
)
@ -679,8 +678,7 @@ class Customer(HashModel):
# Before running queries, we need to run migrations to set up the
# indexes that Redis OM will use. You can also use the `migrate`
# CLI tool for this!
redis = get_redis_connection()
Migrator(redis).run()
Migrator().run()
# Find all customers with the last name "Brookins"
Customer.find(Customer.last_name == "Brookins").all()

View file

@ -1,6 +1,6 @@
# Redis OM for Python
Welcome! This is the index of documentation for redis-om-python.
Welcome! This is the documentation for redis-om-python.
**NOTE**: The documentation is a bit sparse at the moment but will continue to grow!
@ -12,6 +12,10 @@ Read the Getting Started tutorial at [getting_started.md](getting_started.md).
Read about connecting to Redis at [connections.md](connections.md).
## Models and Fields
Learn all about how to create model instances and define fields in [models.md](models.md).
## Validating Data
Read about how to use Redis OM models to validate data at [validation.md](validation.md)

273
docs/models.md Normal file
View file

@ -0,0 +1,273 @@
# Models and Fields
The heart of Redis OM's object mapping, validation, and querying features is a
pair of declarative models: `HashModel` and `JsonModel`. Both models
provide roughly the same API, but they store data in Redis differently.
This page will explain how to create your Redis OM model by subclassing one of
these classes.
## HashModel vs. JsonModel
First, which should you use?
The choice is relatively simple. If you want to embed a model inside another
model, like giving a `Customer` model a list of `Order` models, then you need to
use `JsonModel`. Only `JsonModel` supports embedded models.
Otherwise, use `HashModel`.
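For instance, here is a minimal sketch (class names are illustrative) of the kind of nesting that only `JsonModel` supports:
```python
from typing import List

from redis_om import EmbeddedJsonModel, JsonModel


class Order(EmbeddedJsonModel):
    total: float


class Customer(JsonModel):
    first_name: str
    # A list of embedded models -- this requires JsonModel.
    orders: List[Order]
```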
## Creating Your Model
You create a Redis OM model by subclassing `HashModel` or `JsonModel`. For
example:
```python
from redis_om import HashModel
class Customer(HashModel):
    first_name: str
    last_name: str
```
## Configuring Models
There are several Redis OM-specific settings you can configure in models. You
configure these settings using a special object called the _Meta object_.
Here is an example of using the Meta object to set a global key prefix:
```python
from redis_om import HashModel
class Customer(HashModel):
    first_name: str
    last_name: str

    class Meta:
        global_key_prefix = "customer-dashboard"
```
## Abstract Models
You can create abstract Redis OM models by subclassing `ABC` in addition to
either `HashModel` or `JsonModel`. Abstract models exist only to gather shared
configuration for subclasses -- you can't instantiate them.
One use of abstract models is to configure a Redis key prefix that all models in
your application will use. This is a best practice with Redis. Here's how
you'd do it with an abstract model:
```python
from abc import ABC
from redis_om import HashModel
class BaseModel(HashModel, ABC):
    class Meta:
        global_key_prefix = "your-application"
```
### The Meta Object Is "Special"
The Meta object has a special property: if you create a model subclass from a base class that has a Meta object, Redis OM copies the parent Meta object's settings into the child class's Meta object.
Because of this, a subclass can override a single setting in its parent's Meta object without having to redefine all of them.
An example will make this clearer:
```python
from abc import ABC
from redis_om import HashModel, get_redis_connection
redis = get_redis_connection(port=6380)
other_redis = get_redis_connection(port=6381)
class BaseModel(HashModel, ABC):
    class Meta:
        global_key_prefix = "customer-dashboard"
        database = redis


class Customer(BaseModel):
    first_name: str
    last_name: str

    class Meta:
        database = other_redis
print(Customer.global_key_prefix)
# > "customer-dashboard"
```
In this example, we created an abstract base model called `BaseModel` and gave it a Meta object containing a database connection and a global key prefix.
Then we created a subclass of `BaseModel` called `Customer` and gave it a second Meta object that only defined `database`. `Customer` _also gets the global key prefix_ that `BaseModel` defined ("customer-dashboard").
While this is not how object inheritance usually works in Python, we think it makes abstract models more useful, especially as a way to group shared model settings.
### All Settings Supported by the Meta Object
Here is a table of the settings available in the Meta object and what they control.
| Setting | Description | Default |
| ----------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------- |
| global_key_prefix | A string prefix applied to every Redis key that the model manages. This could be something like your application's name. | "" |
| model_key_prefix | A string prefix applied to the Redis key representing every model. For example, the Redis Hash key for a HashModel. This prefix is also added to the redisearch index created for every model with indexed fields. | "" |
| primary_key_pattern | A format string producing the base string for a Redis key representing this model. This string should accept a "pk" format argument. **Note:** This is a "new style" format string, which will be called with `.format()`. | "{pk}" |
| database | An aioredis.Redis or redis.Redis client instance that the model will use to communicate with Redis. | A new instance created with connections.get_redis_connection(). |
| primary_key_creator_cls | A class that adheres to the PrimaryKeyCreator protocol, which Redis OM will use to create a primary key for a new model instance. | UlidPrimaryKey |
| index_name | The RediSearch index name to use for this model. Only used if at least one of the model's fields is marked as indexable (`index=True`). | "{global_key_prefix}:{model_key_prefix}:index" |
| embedded | Whether or not this model is "embedded." Embedded models are not included in migrations that create and destroy indexes. Instead, their indexed fields are included in the index for the parent model. **Note**: Only `JsonModel` can have embedded models. | False |
| encoding | The default encoding to use for strings. This encoding is given to redis-py or aioredis at the connection level. In both cases, Redis OM will decode binary strings from Redis using your chosen encoding. | "utf-8" |
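To make these settings concrete, here's a short sketch combining several of them on one model; the model and values are illustrative:
```python
from redis_om import HashModel, get_redis_connection

# An explicit connection is optional; by default Redis OM creates one for you.
redis = get_redis_connection()


class Product(HashModel):
    name: str

    class Meta:
        global_key_prefix = "my-app"   # prepended to every Redis key
        model_key_prefix = "product"   # prepended to this model's keys and index
        database = redis               # the client this model uses
        encoding = "utf-8"             # how binary strings from Redis are decoded
```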
## Configuring Pydantic
Every Redis OM model is also a Pydantic model, so in addition to configuring Redis OM behavior with the Meta object, you can control Pydantic configuration via the Config object within a model class.
See the [Pydantic documentation for details](https://pydantic-docs.helpmanual.io/usage/model_config/) on how this object works and the settings that are available.
The default Pydantic configuration for models, which Redis OM sets for you, is equivalent to the following (demonstrated on an actual model):
```python
from redis_om import HashModel
class Customer(HashModel):
    # ... Fields ...

    class Config:
        orm_mode = True
        arbitrary_types_allowed = True
        extra = "allow"
```
Some features may not work correctly if you change these settings.
## Fields
You define fields on a Redis OM model using Python _type annotations_. If you
aren't familiar with type annotations, check out this
[tutorial](https://towardsdatascience.com/type-annotations-in-python-d90990b172dc).
This works exactly the same way as it does with Pydantic. Check out the [Pydantic documentation on field types](https://pydantic-docs.helpmanual.io/usage/types/) for guidance.
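For example, here is a sketch mirroring the README's `Customer` model, mixing built-in types with a Pydantic type:
```python
import datetime

from pydantic import EmailStr

from redis_om import HashModel


class Customer(HashModel):
    first_name: str
    last_name: str
    email: EmailStr           # Pydantic types work as field annotations
    join_date: datetime.date
    age: int
```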
### With HashModel
`HashModel` stores data in Redis Hashes, which are flat. This means that a Redis Hash can't contain a Redis Set, List, or Hash. Because of this requirement, `HashModel` also does not currently support container types, such as:
* Sets
* Lists
* Dictionaries and other "mapping" types
* Other Redis OM models
* Pydantic models
**NOTE**: In the future, we may serialize these values as JSON strings, the same way we do for `JsonModel`. The difference would be that in the case of `HashModel`, you wouldn't be able to index these fields, just get and save them with the model. With `JsonModel`, you can index list fields and embedded `JsonModel`s.
So, in short, if you want to use container types, use `JsonModel`.
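For example, based on the tests added in this commit, declaring a list field on a `HashModel` raises a `RedisModelError` when the class is defined -- a sketch:
```python
from typing import List

from redis_om import HashModel


class InvalidMember(HashModel):
    # Raises RedisModelError at class-definition time: HashModel fields
    # can't be lists, sets, mappings, dataclasses, or embedded models.
    friend_ids: List[str]
```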
### With JsonModel
Good news! Container types _are_ supported with `JsonModel`.
We will use Pydantic's JSON serialization and encoding to serialize your `JsonModel` and save it in Redis.
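For example, here is a sketch based on the tests added in this commit (sync style, assuming the RedisJSON module is loaded):
```python
from typing import List

from redis_om import JsonModel


class Member(JsonModel):
    # Container fields are serialized to JSON along with the rest of the model.
    friend_ids: List[int]


member = Member(friend_ids=[1, 2])
member.save()

assert Member.get(member.pk).friend_ids == [1, 2]
```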
### Default Values
Fields can have default values. You set them by assigning a value to a field.
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
    first_name: str
    last_name: str
    email: str
    join_date: datetime.date
    age: int
    bio: Optional[str] = "Super dope"  # <- We added a default here
```
Now, if we create a `Customer` object without a `bio` field, it will use the default value.
```python
import datetime
from typing import Optional
from redis_om import HashModel
class Customer(HashModel):
    first_name: str
    last_name: str
    email: str
    join_date: datetime.date
    age: int
    bio: Optional[str] = "Super dope"


andrew = Customer(
    first_name="Andrew",
    last_name="Brookins",
    email="andrew.brookins@example.com",
    join_date=datetime.date.today(),
    age=38)  # <- Notice, we didn't give a bio!

print(andrew.bio)  # <- So we got the default value.
# > 'Super dope'
```
The model will then save this default value to Redis the next time you call `save()`.
## Marking a Field as Indexed
If you're using the RediSearch module in your Redis instance, you can mark a field as "indexed." As soon as you mark any field in a model as indexed, Redis OM will automatically create and manage a secondary index for the model, allowing you to query on any indexed field.
To mark a field as indexed, you need to use the Redis OM `Field()` helper, like this:
```python
from redis_om import (
Field,
HashModel,
)
class Customer(HashModel):
    first_name: str
    last_name: str = Field(index=True)
```
In this example, we marked `Customer.last_name` as indexed.
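Continuing that example, once the index exists (see the migration steps below), the indexed field becomes queryable, just like in the README:
```python
# Assumes the Customer model above and that migrations have already run.
Customer.find(Customer.last_name == "Brookins").all()
```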
To create the indexes for any models that have indexed fields, use the `migrate` CLI command that Redis OM installs in your Python environment.
This command detects any `JsonModel` or `HashModel` classes in your project and does the following for each model that isn't abstract or embedded:
* If no index exists yet for the model:
  * The migrator creates an index
  * The migrator stores a hash of the index definition
* If an index exists for the model:
  * The migrator checks if the stored hash for the index is out of date
  * If the stored hash is out of date, the migrator drops the index (not your data!) and rebuilds it with the new index definition
You can also run the `Migrator` yourself with code:
```python
from redis_om import Migrator

Migrator().run()
```

View file

@ -129,4 +129,21 @@ Once again, we get the validation error:
value is not a valid email address (type=value_error.email)
```
## Constrained Values
Pydantic includes many type annotations that introduce constraints on your model field values.
The concept of "constraints" includes quite a few possibilities:
* Strings that are always lowercase
* Strings that must match a regular expression
* Integers within a range
* Integers that are a specific multiple
* And many more...
All of these constraint types work with Redis OM models. Read the [Pydantic documentation on constrained types](https://pydantic-docs.helpmanual.io/usage/types/#constrained-types) to learn more.
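For instance, here's a sketch applying a couple of Pydantic's constrained types to a Redis OM model; the field names and constraints are illustrative:
```python
from pydantic import conint, constr

from redis_om import HashModel


class Customer(HashModel):
    # Must be three uppercase letters followed by three digits.
    account_code: constr(regex=r"^[A-Z]{3}[0-9]{3}$")
    # Age must be between 0 and 120, inclusive.
    age: conint(ge=0, le=120)
```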
[pydantic-url]: https://github.com/samuelcolvin/pydantic

View file

@ -1,8 +1,9 @@
import abc
import dataclasses
import datetime
import decimal
from collections import namedtuple
from typing import Optional
from typing import Optional, Dict, Set, List
from unittest import mock
import pytest
@ -49,7 +50,7 @@ async def m(key_prefix, redis):
model_key_prefix = "member"
primary_key_pattern = ""
await Migrator(redis).run()
await Migrator().run()
return namedtuple("Models", ["BaseHashModel", "Order", "Member"])(
BaseHashModel, Order, Member
@ -365,6 +366,34 @@ def test_raises_error_with_embedded_models(m):
address: Address
def test_raises_error_with_dataclasses(m):
@dataclasses.dataclass
class Address:
address_line_1: str
with pytest.raises(RedisModelError):
class InvalidMember(m.BaseHashModel):
address: Address
def test_raises_error_with_dicts(m):
with pytest.raises(RedisModelError):
class InvalidMember(m.BaseHashModel):
address: Dict[str, str]
def test_raises_error_with_sets(m):
with pytest.raises(RedisModelError):
class InvalidMember(m.BaseHashModel):
friend_ids: Set[str]
def test_raises_error_with_lists(m):
with pytest.raises(RedisModelError):
class InvalidMember(m.BaseHashModel):
friend_ids: List[str]
@pytest.mark.asyncio
async def test_saves_many(m):
member1 = m.Member(

View file

@ -1,8 +1,9 @@
import abc
import dataclasses
import datetime
import decimal
from collections import namedtuple
from typing import List, Optional
from typing import List, Optional, Set, Dict
from unittest import mock
import pytest
@ -73,7 +74,7 @@ async def m(key_prefix, redis):
# Creates an embedded list of models.
orders: Optional[List[Order]]
await Migrator(redis).run()
await Migrator().run()
return namedtuple(
"Models", ["BaseJsonModel", "Note", "Address", "Item", "Order", "Member"]
@ -166,7 +167,7 @@ async def test_validation_passes(address, m):
@pytest.mark.asyncio
async def test_saves_model_and_creates_pk(address, m, redis):
await Migrator(redis).run()
await Migrator().run()
member = m.Member(
first_name="Andrew",
@ -650,7 +651,7 @@ async def test_list_field_limitations(m, redis):
# We need to import and run this manually because we defined
# our model classes within a function that runs after the test
# suite's migrator has already looked for migrations to run.
await Migrator(redis).run()
await Migrator().run()
witch = TarotWitch(tarot_cards=["death"])
await witch.save()
@ -658,6 +659,63 @@ async def test_list_field_limitations(m, redis):
assert actual == [witch]
@pytest.mark.asyncio
async def test_allows_dataclasses(m):
@dataclasses.dataclass
class Address:
address_line_1: str
class ValidMember(m.BaseJsonModel):
address: Address
address = Address(address_line_1="hey")
member = ValidMember(address=address)
await member.save()
member2 = await ValidMember.get(member.pk)
assert member2 == member
assert member2.address.address_line_1 == "hey"
@pytest.mark.asyncio
async def test_allows_and_serializes_dicts(m):
class ValidMember(m.BaseJsonModel):
address: Dict[str, str]
member = ValidMember(address={"address_line_1": "hey"})
await member.save()
member2 = await ValidMember.get(member.pk)
assert member2 == member
assert member2.address['address_line_1'] == "hey"
@pytest.mark.asyncio
async def test_allows_and_serializes_sets(m):
class ValidMember(m.BaseJsonModel):
friend_ids: Set[int]
member = ValidMember(friend_ids={1, 2})
await member.save()
member2 = await ValidMember.get(member.pk)
assert member2 == member
assert member2.friend_ids == {1, 2}
@pytest.mark.asyncio
async def test_allows_and_serializes_lists(m):
class ValidMember(m.BaseJsonModel):
friend_ids: List[int]
member = ValidMember(friend_ids=[1, 2])
await member.save()
member2 = await ValidMember.get(member.pk)
assert member2 == member
assert member2.friend_ids == [1, 2]
@pytest.mark.asyncio
async def test_schema(m, key_prefix):
# We need to build the key prefix because it will differ based on whether

View file

@ -35,7 +35,7 @@ async def m(key_prefix, redis):
model_key_prefix = "member"
primary_key_pattern = ""
await Migrator(redis).run()
await Migrator().run()
return namedtuple("Models", ["BaseHashModel", "Order", "Member"])(
BaseHashModel, Order, Member

View file

@ -24,7 +24,7 @@ async def m(key_prefix, redis):
join_date: datetime.date
age: int
await Migrator(redis).run()
await Migrator().run()
return namedtuple("Models", ["Member"])(Member)