Support both sync and asyncio uses
parent ca6ae7d6e9
commit 5ab53c916c
10 changed files with 47 additions and 70 deletions
.gitignore (vendored): 3 changes
@@ -133,4 +133,5 @@ data
 .install.stamp
 
 # Sync version of the library, via Unasync
-redis_om/
+redis_om/
+tests_sync/
Makefile: 4 changes
@@ -62,14 +62,14 @@ format: $(INSTALL_STAMP) sync
 
 .PHONY: test
 test: $(INSTALL_STAMP) sync
-	$(POETRY) run pytest -n auto -s -vv ./tests/ --cov-report term-missing --cov $(NAME) $(SYNC_NAME)
+	$(POETRY) run pytest -n auto -vv ./tests/ ./tests_sync/ --cov-report term-missing --cov $(NAME) $(SYNC_NAME)
 
 .PHONY: test_oss
 test_oss: $(INSTALL_STAMP) sync
 	# Specifically tests against a local OSS Redis instance via
 	# docker-compose.yml. Do not use this for CI testing, where we should
 	# instead have a matrix of Docker images.
-	REDIS_OM_URL="redis://localhost:6381" $(POETRY) run pytest -n auto -s -vv ./tests/ --cov-report term-missing --cov $(NAME)
+	REDIS_OM_URL="redis://localhost:6381" $(POETRY) run pytest -n auto -vv ./tests/ ./tests_sync/ --cov-report term-missing --cov $(NAME)
 
 
 .PHONY: shell
__init__.py
@@ -1,2 +1,2 @@
 from .migrations.migrator import MigrationError, Migrator
-from .model import EmbeddedJsonModel, Field, HashModel, JsonModel, RedisModel
+from .model import EmbeddedJsonModel, Field, HashModel, JsonModel, RedisModel, NotFoundError
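
NotFoundError is now re-exported next to the model classes, so callers can catch it from the package root. A minimal usage sketch (Customer and find_customer are illustrative names, not part of the repo; it assumes get() raises NotFoundError for a missing primary key, as in redis-om's documented behaviour):

    from aredis_om import HashModel, NotFoundError

    class Customer(HashModel):
        first_name: str

    async def find_customer(pk: str):
        try:
            return await Customer.get(pk)
        except NotFoundError:
            return None

The unasync-generated redis_om package exposes the same name, so the sync copy of this code differs only in dropping async/await.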
migrator.py
@@ -6,8 +6,6 @@ from typing import List, Optional
 
 from aioredis import Redis, ResponseError
 
-from aredis_om.model.model import model_registry
-
 
 log = logging.getLogger(__name__)
 
@@ -96,6 +94,10 @@ class Migrator:
         if self.module:
             import_submodules(self.module)
 
+        # Import this at run-time to avoid triggering import-time side effects,
+        # e.g. checks for RedisJSON, etc.
+        from aredis_om.model.model import model_registry
+
         for name, cls in model_registry.items():
             hash_key = schema_hash_key(cls.Meta.index_name)
             try:
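
The module-level import of model_registry moves into the method body, so merely importing the migrator no longer triggers aredis_om.model.model's import-time work. A generic sketch of the deferred-import pattern (heavy_module and run_checks are illustrative names, not the project's API):

    def run_checks():
        # Module-level side effects in heavy_module (for example, probing a
        # Redis server for loaded modules) only happen once this is called.
        from heavy_module import registry

        for name, item in registry.items():
            print(name, item)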
aredis_om/model/model.py
@@ -10,7 +10,6 @@ from functools import reduce
 from typing import (
     AbstractSet,
     Any,
-    AsyncGenerator,
     Callable,
     Dict,
     List,
@@ -1295,7 +1294,7 @@ class HashModel(RedisModel, abc.ABC):
         return self
 
     @classmethod
-    async def all_pks(cls) -> AsyncGenerator[str, None]:  # type: ignore
+    async def all_pks(cls):  # type: ignore
         key_prefix = cls.make_key(cls._meta.primary_key_pattern.format(pk=""))
         # TODO: We assume the key ends with the default separator, ":" -- when
        # we make the separator configurable, we need to update this as well.
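
The return annotation is dropped, presumably because it does not survive the unasync rewrite cleanly for the generated sync copy, and removing it also lets AsyncGenerator disappear from the typing imports (see the hunk above). A rough sketch of how the two copies are consumed (Customer is an illustrative model name):

    # Async source under aredis_om / tests:
    async def collect_pks():
        return [pk async for pk in Customer.all_pks()]

    # What unasync generates for redis_om / tests_sync, roughly:
    def collect_pks():
        return [pk for pk in Customer.all_pks()]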
@ -1437,13 +1436,16 @@ class HashModel(RedisModel, abc.ABC):
|
|||
|
||||
class JsonModel(RedisModel, abc.ABC):
|
||||
def __init_subclass__(cls, **kwargs):
|
||||
if not has_redis_json(cls.db()):
|
||||
# Generate the RediSearch schema once to validate fields.
|
||||
cls.redisearch_schema()
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
if not has_redis_json(self.db()):
|
||||
log.error(
|
||||
"Your Redis instance does not have the RedisJson module "
|
||||
"loaded. JsonModel depends on RedisJson."
|
||||
)
|
||||
# Generate the RediSearch schema once to validate fields.
|
||||
cls.redisearch_schema()
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
async def save(self, pipeline: Optional[Pipeline] = None) -> "JsonModel":
|
||||
self.check()
|
||||
|
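
Moving the RedisJson check from __init_subclass__ to __init__ means defining a JsonModel subclass no longer needs a live Redis connection; the check now runs only when an instance is created. A minimal illustration of the timing difference (Base and Child are illustrative names):

    class Base:
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__(**kwargs)
            print("runs when a subclass is defined (import time)")

        def __init__(self):
            print("runs when an instance is created")

    class Child(Base):  # triggers __init_subclass__
        pass

    Child()  # triggers __init__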
make_sync.py: 20 changes
@@ -3,23 +3,29 @@ from pathlib import Path
 
 import unasync
 
+ADDITIONAL_REPLACEMENTS = {
+    "aredis_om": "redis_om",
+    "aioredis": "redis",
+    ":tests.": ":tests_sync.",
+}
+
 
 def main():
-    additional_replacements = {
-        "aredis_om": "redis_om",
-        "aioredis": "redis"
-    }
     rules = [
         unasync.Rule(
             fromdir="/aredis_om/",
             todir="/redis_om/",
-            additional_replacements=additional_replacements,
+            additional_replacements=ADDITIONAL_REPLACEMENTS,
         ),
+        unasync.Rule(
+            fromdir="/tests/",
+            todir="/tests_sync/",
+            additional_replacements=ADDITIONAL_REPLACEMENTS,
+        ),
     ]
 
     filepaths = []
     for root, _, filenames in os.walk(
-        Path(__file__).absolute().parent / "aredis_om"
+        Path(__file__).absolute().parent
     ):
         for filename in filenames:
             if filename.rpartition(".")[-1] in ("py", "pyi",):
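
make_sync.py now runs a second unasync rule so the test suite is mirrored into tests_sync/, and the shared ADDITIONAL_REPLACEMENTS map rewrites package and module names in both copies. A hedged before/after sketch of what the rules produce (example.py and fetch_name are illustrative, not files in the repo):

    # aredis_om/example.py -- async source
    from aioredis import Redis

    async def fetch_name(db: Redis, key: str):
        return await db.get(key)

    # After `python make_sync.py`, the generated redis_om/example.py would
    # read roughly:
    #
    #     from redis import Redis
    #
    #     def fetch_name(db: Redis, key: str):
    #         return db.get(key)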
pyproject.toml
@@ -19,6 +19,7 @@ classifiers = [
 include=[
     "docs/*",
     "images/*",
+    "redis_om/**/*",
 ]
 
 [tool.poetry.dependencies]
setup.py: 47 changes
File diff suppressed because one or more lines are too long
tests/test_hash_model.py
@@ -14,9 +14,12 @@ from aredis_om import (
     Migrator,
     QueryNotSupportedError,
     RedisModelError,
-    has_redisearch,
 )
 
+# We need to run this check as sync code (during tests) even in async mode
+# because we call it in the top-level module scope.
+from redis_om import has_redisearch
+
 
 if not has_redisearch():
     pytestmark = pytest.mark.skip
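
The capability check moves to the generated sync package because it runs at module import time, where awaiting is impossible. A generic sketch of the module-level skip pattern used here (feature_available stands in for has_redisearch / has_redis_json):

    import pytest

    def feature_available() -> bool:
        return False  # pretend the required Redis module is missing

    if not feature_available():
        # A module-level pytestmark applies the mark to every test in the file.
        pytestmark = pytest.mark.skip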
@@ -438,7 +441,11 @@ def test_schema(m, key_prefix):
         another_integer: int
         another_float: float
 
+    # We need to build the key prefix because it will differ based on whether
+    # these tests were copied into the tests_sync folder and unasync'd.
+    key_prefix = Address.make_key(Address._meta.primary_key_pattern.format(pk=""))
+
     assert (
         Address.redisearch_schema()
-        == f"ON HASH PREFIX 1 {key_prefix}:tests.test_hash_model.Address: SCHEMA pk TAG SEPARATOR | a_string TAG SEPARATOR | a_full_text_string TAG SEPARATOR | a_full_text_string_fts TEXT an_integer NUMERIC SORTABLE a_float NUMERIC"
+        == f"ON HASH PREFIX 1 {key_prefix} SCHEMA pk TAG SEPARATOR | a_string TAG SEPARATOR | a_full_text_string TAG SEPARATOR | a_full_text_string_fts TEXT an_integer NUMERIC SORTABLE a_float NUMERIC"
     )
tests/test_json_model.py
@@ -16,9 +16,12 @@ from aredis_om import (
     NotFoundError,
     QueryNotSupportedError,
     RedisModelError,
-    has_redis_json,
 )
 
+# We need to run this check as sync code (during tests) even in async mode
+# because we call it in the top-level module scope.
+from redis_om import has_redis_json
+
 
 if not has_redis_json():
     pytestmark = pytest.mark.skip
@@ -148,7 +151,6 @@ async def test_validates_field(address, m):
     )
 
 
-# Passes validation
 @pytest.mark.asyncio
 async def test_validation_passes(address, m):
     member = m.Member(
@ -658,7 +660,10 @@ async def test_list_field_limitations(m, redis):
|
|||
|
||||
@pytest.mark.asyncio
|
||||
async def test_schema(m, key_prefix):
|
||||
# We need to build the key prefix because it will differ based on whether
|
||||
# these tests were copied into the tests_sync folder and unasynce'd.
|
||||
key_prefix = m.Member.make_key(m.Member._meta.primary_key_pattern.format(pk=""))
|
||||
assert (
|
||||
m.Member.redisearch_schema()
|
||||
== f"ON JSON PREFIX 1 {key_prefix}:tests.test_json_model.Member: SCHEMA $.pk AS pk TAG SEPARATOR | $.first_name AS first_name TAG SEPARATOR | $.last_name AS last_name TAG SEPARATOR | $.email AS email TAG SEPARATOR | $.age AS age NUMERIC $.bio AS bio TAG SEPARATOR | $.bio AS bio_fts TEXT $.address.pk AS address_pk TAG SEPARATOR | $.address.city AS address_city TAG SEPARATOR | $.address.postal_code AS address_postal_code TAG SEPARATOR | $.address.note.pk AS address_note_pk TAG SEPARATOR | $.address.note.description AS address_note_description TAG SEPARATOR | $.orders[*].pk AS orders_pk TAG SEPARATOR | $.orders[*].items[*].pk AS orders_items_pk TAG SEPARATOR | $.orders[*].items[*].name AS orders_items_name TAG SEPARATOR |"
|
||||
== f"ON JSON PREFIX 1 {key_prefix} SCHEMA $.pk AS pk TAG SEPARATOR | $.first_name AS first_name TAG SEPARATOR | $.last_name AS last_name TAG SEPARATOR | $.email AS email TAG SEPARATOR | $.age AS age NUMERIC $.bio AS bio TAG SEPARATOR | $.bio AS bio_fts TEXT $.address.pk AS address_pk TAG SEPARATOR | $.address.city AS address_city TAG SEPARATOR | $.address.postal_code AS address_postal_code TAG SEPARATOR | $.address.note.pk AS address_note_pk TAG SEPARATOR | $.address.note.description AS address_note_description TAG SEPARATOR | $.orders[*].pk AS orders_pk TAG SEPARATOR | $.orders[*].items[*].pk AS orders_items_pk TAG SEPARATOR | $.orders[*].items[*].name AS orders_items_name TAG SEPARATOR |"
|
||||
)
|
||||