"""Index migrations for redis-om models.

Compares each model's current RediSearch schema against a stored hash and
creates, or drops and recreates, search indexes as needed.
"""

import hashlib
import importlib
import logging
import pkgutil
from dataclasses import dataclass
from enum import Enum
from typing import List, Optional

from ... import redis


log = logging.getLogger(__name__)


class MigrationError(Exception):
    pass


def import_submodules(root_module_name: str):
    """Import all submodules of a module, recursively."""
    # TODO: Call this without specifying a module name, to import everything?
    root_module = importlib.import_module(root_module_name)

    if not hasattr(root_module, "__path__"):
        raise MigrationError(
            "The root module must be a Python package. "
            f"You specified: {root_module_name}"
        )

    for loader, module_name, is_pkg in pkgutil.walk_packages(
        root_module.__path__, root_module.__name__ + "."  # type: ignore
    ):
        importlib.import_module(module_name)
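

# Usage sketch (the package name is hypothetical): calling
#
#     import_submodules("myapp.models")
#
# imports myapp.models.user, myapp.models.order, and so on, so that each model
# class registers itself in the model registry as an import-time side effect.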


def schema_hash_key(index_name):
    return f"{index_name}:hash"
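

# Each index's schema hash lives under a companion key derived from the index
# name, e.g. schema_hash_key("member-index") returns "member-index:hash"
# ("member-index" is a hypothetical index name).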


async def create_index(conn: redis.Redis, index_name, schema, current_hash):
    db_number = conn.connection_pool.connection_kwargs.get("db")
    if db_number and db_number > 0:
        raise MigrationError(
            "Creating search indexes is only supported in database 0. "
            f"You attempted to create an index in database {db_number}"
        )
    try:
        await conn.ft(index_name).info()
    except redis.ResponseError:
        await conn.execute_command(f"ft.create {index_name} {schema}")
        # TODO: remove "type: ignore" when the type stubs are fixed
        await conn.set(schema_hash_key(index_name), current_hash)  # type: ignore
    else:
        log.info("Index already exists, skipping. Index name: %s", index_name)
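

# The schema string is spliced verbatim into FT.CREATE: for a hypothetical
# schema "ON HASH PREFIX 1 member: SCHEMA name TEXT" and index "member-index",
# the command sent is
#
#     ft.create member-index ON HASH PREFIX 1 member: SCHEMA name TEXT
#
# after which the schema hash is stored under "member-index:hash".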


class MigrationAction(Enum):
    CREATE = 2
    DROP = 1


@dataclass
class IndexMigration:
    model_name: str
    index_name: str
    schema: str
    hash: str
    action: MigrationAction
    conn: redis.Redis
    previous_hash: Optional[str] = None

    async def run(self):
        if self.action is MigrationAction.CREATE:
            await self.create()
        elif self.action is MigrationAction.DROP:
            await self.drop()

    async def create(self):
        try:
            await create_index(self.conn, self.index_name, self.schema, self.hash)
        except redis.ResponseError:
            log.info("Index already exists: %s", self.index_name)

    async def drop(self):
        try:
            await self.conn.ft(self.index_name).dropindex()
        except redis.ResponseError:
            log.info("Index does not exist: %s", self.index_name)
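

# Hedged example of how these migrations pair up: when a stored schema hash no
# longer matches, detect_migrations (below) queues a DROP and then a CREATE for
# the same index, roughly:
#
#     IndexMigration("Member", "member-index", schema, new_hash,
#                    MigrationAction.DROP, conn, old_hash)
#     IndexMigration("Member", "member-index", schema, new_hash,
#                    MigrationAction.CREATE, conn, old_hash)
#
# ("Member" and "member-index" are hypothetical model and index names.)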


class Migrator:
    def __init__(self, module=None):
        self.module = module
        self.migrations: List[IndexMigration] = []

    async def detect_migrations(self):
        # Try to load any modules found under the given path or module name.
        if self.module:
            import_submodules(self.module)

        # Import this at run-time to avoid triggering import-time side effects,
        # e.g. checks for RedisJSON, etc.
        from aredis_om.model.model import model_registry

        for name, cls in model_registry.items():
            hash_key = schema_hash_key(cls.Meta.index_name)
            conn = cls.db()
            try:
                schema = cls.redisearch_schema()
            except NotImplementedError:
                log.info("Skipping migrations for %s", name)
                continue
            current_hash = hashlib.sha1(schema.encode("utf-8")).hexdigest()  # nosec

            try:
                await conn.ft(cls.Meta.index_name).info()
            except redis.ResponseError:
                # The index does not exist yet, so schedule its creation.
                self.migrations.append(
                    IndexMigration(
                        name,
                        cls.Meta.index_name,
                        schema,
                        current_hash,
                        MigrationAction.CREATE,
                        conn,
                    )
                )
                continue

            stored_hash = await conn.get(hash_key)
            schema_out_of_date = current_hash != stored_hash

            if schema_out_of_date:
                # TODO: Switch out schema with an alias to avoid downtime -- separate migration?
                # The stored hash differs from the current schema's hash, so
                # drop and recreate the index to pick up the new schema.
                self.migrations.append(
                    IndexMigration(
                        name,
                        cls.Meta.index_name,
                        schema,
                        current_hash,
                        MigrationAction.DROP,
                        conn,
                        stored_hash,
                    )
                )
                self.migrations.append(
                    IndexMigration(
                        name,
                        cls.Meta.index_name,
                        schema,
                        current_hash,
                        MigrationAction.CREATE,
                        conn,
                        stored_hash,
                    )
                )

    async def run(self):
        # TODO: Migration history
        # TODO: Dry run with output
        await self.detect_migrations()
        for migration in self.migrations:
            await migration.run()
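

# A minimal usage sketch, hedged: "myapp.models" is a hypothetical package of
# model definitions, and the import path is inferred from this module's
# location in the package. One way to drive the migrator, e.g. at application
# startup:
#
#     import asyncio
#
#     from aredis_om.model.migrations.migrator import Migrator
#
#     async def migrate():
#         await Migrator(module="myapp.models").run()
#
#     asyncio.run(migrate())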