2021-09-16 02:41:45 +02:00
|
|
|
import abc
|
|
|
|
import dataclasses
|
|
|
|
import decimal
|
|
|
|
import operator
|
2021-09-17 18:27:11 +02:00
|
|
|
from copy import copy, deepcopy
|
2021-08-31 03:08:07 +02:00
|
|
|
from dataclasses import dataclass
|
|
|
|
from enum import Enum
|
2021-09-16 02:41:45 +02:00
|
|
|
from functools import reduce
|
2021-08-31 03:08:07 +02:00
|
|
|
from typing import (
|
|
|
|
AbstractSet,
|
|
|
|
Any,
|
|
|
|
Callable,
|
|
|
|
Dict,
|
|
|
|
Mapping,
|
|
|
|
Optional,
|
|
|
|
Set,
|
|
|
|
Tuple,
|
|
|
|
TypeVar,
|
|
|
|
Union,
|
2021-09-01 21:56:06 +02:00
|
|
|
Sequence,
|
|
|
|
no_type_check,
|
|
|
|
Protocol,
|
|
|
|
List, Type
|
2021-08-31 03:08:07 +02:00
|
|
|
)
|
2021-09-01 01:30:31 +02:00
|
|
|
import uuid
|
2021-08-31 03:08:07 +02:00
|
|
|
|
|
|
|
import redis
|
2021-09-01 21:56:06 +02:00
|
|
|
from pydantic import BaseModel, validator
|
2021-08-31 03:08:07 +02:00
|
|
|
from pydantic.fields import FieldInfo as PydanticFieldInfo
|
|
|
|
from pydantic.fields import ModelField, Undefined, UndefinedType
|
2021-09-16 02:41:45 +02:00
|
|
|
from pydantic.main import ModelMetaclass
|
2021-09-01 21:56:06 +02:00
|
|
|
from pydantic.typing import NoArgAnyCallable
|
2021-08-31 03:08:07 +02:00
|
|
|
from pydantic.utils import Representation
|
|
|
|
|
|
|
|
from .encoders import jsonable_encoder
|
|
|
|
|
2021-09-16 02:41:45 +02:00
|
|
|
|
|
|
|
# Global registry of all concrete (non-abstract) model classes, keyed by
# "<module>.<class name>". ModelMeta adds each new model class at creation time.
model_registry = {}

# Generic type variable used by the __dataclass_transform__ shim below.
_T = TypeVar("_T")
|
|
|
|
|
|
|
|
|
|
|
|
class RedisModelError(Exception):
    """Raised when a model is declared or queried incorrectly."""
|
|
|
|
|
|
|
|
|
|
|
|
class NotFoundError(Exception):
    """Raised when a lookup finds no document for the given key."""
|
|
|
|
|
|
|
|
|
2021-09-16 02:41:45 +02:00
|
|
|
class Operators(Enum):
    """Comparison and logical operators used to build query expression trees.

    Members are compared with ``is`` throughout the query resolver; the
    integer values carry no meaning beyond uniqueness.
    """
    EQ = 1
    NE = 2
    LT = 3
    LE = 4
    GT = 5
    GE = 6
    OR = 7
    AND = 8
    NOT = 9
    IN = 10
    NOT_IN = 11
    LIKE = 12
|
2021-08-31 03:08:07 +02:00
|
|
|
|
|
|
|
|
2021-09-16 21:03:03 +02:00
|
|
|
@dataclass
class NegatedExpression:
    """Wrapper marking an Expression whose RediSearch rendering is negated."""

    expression: 'Expression'

    def __invert__(self):
        # Double negation: unwrap back to the original expression.
        return self.expression

    def __and__(self, other):
        return Expression(right=other, op=Operators.AND, left=self)

    def __or__(self, other):
        return Expression(right=other, op=Operators.OR, left=self)
|
|
|
|
|
2021-09-16 21:03:03 +02:00
|
|
|
|
2021-08-31 03:08:07 +02:00
|
|
|
@dataclass
class Expression:
    """A node in a binary query expression tree: ``left <op> right``.

    ``left`` and ``right`` may be model fields, literal values, or nested
    (possibly negated) expressions.
    """

    op: Operators
    left: Any
    right: Any

    def __invert__(self):
        # Negation is modeled by wrapping, not by a NOT node.
        return NegatedExpression(self)

    def __and__(self, other):
        return Expression(right=other, op=Operators.AND, left=self)

    def __or__(self, other):
        return Expression(right=other, op=Operators.OR, left=self)
|
|
|
|
|
2021-09-16 02:41:45 +02:00
|
|
|
|
2021-09-16 21:03:03 +02:00
|
|
|
# Either form can appear anywhere the query resolver accepts an expression.
ExpressionOrNegated = Union[Expression, NegatedExpression]
|
|
|
|
|
2021-09-16 02:41:45 +02:00
|
|
|
|
|
|
|
class QueryNotSupportedError(Exception):
    """Raised when an expression cannot be translated into a RediSearch query."""
|
|
|
|
|
|
|
|
|
|
|
|
class RediSearchFieldTypes(Enum):
    """RediSearch index field types; values are the literal schema keywords."""
    TEXT = 'TEXT'
    TAG = 'TAG'
    NUMERIC = 'NUMERIC'
    GEO = 'GEO'
|
|
|
|
|
|
|
|
|
|
|
|
# TODO: How to handle Geo fields?
# Python types that are indexed and queried as RediSearch NUMERIC fields.
NUMERIC_TYPES = (float, int, decimal.Decimal)
|
|
|
|
|
|
|
|
|
|
|
|
@dataclass
class FindQuery:
    """Translates a sequence of Expression objects into a RediSearch query.

    All given expressions are implicitly combined with AND. The rendered
    query string and pagination arguments are computed once, in
    ``__post_init__``, and executed against the model's index by ``find()``.
    """
    expressions: Sequence[Expression]
    # Derived state, populated in __post_init__ (excluded from __init__).
    expression: Expression = dataclasses.field(init=False)
    query: str = dataclasses.field(init=False)
    pagination: List[str] = dataclasses.field(init=False)
    model: Type['RedisModel']
    limit: Optional[int] = None
    offset: Optional[int] = None

    def __post_init__(self):
        # Fold every expression into one AND-combined tree, then render it.
        self.expression = reduce(operator.and_, self.expressions)
        self.query = self.resolve_redisearch_query(self.expression)
        self.pagination = self.resolve_redisearch_pagination()

    def resolve_field_type(self, field: ModelField) -> RediSearchFieldTypes:
        """Map a pydantic model field to the RediSearch type used to index it.

        Primary keys are always TAG fields; fields marked for full-text
        search are TEXT; numeric Python types are NUMERIC; everything else
        falls back to TAG.
        """
        if getattr(field.field_info, 'primary_key', None) is True:
            return RediSearchFieldTypes.TAG
        elif getattr(field.field_info, 'full_text_search', None) is True:
            return RediSearchFieldTypes.TEXT

        field_type = field.outer_type_

        # TODO: GEO
        if any(issubclass(field_type, t) for t in NUMERIC_TYPES):
            return RediSearchFieldTypes.NUMERIC
        else:
            # TAG fields are the default field type.
            return RediSearchFieldTypes.TAG

    @staticmethod
    def expand_tag_value(value):
        """Join an iterable of TAG values with "|" for an IN/NOT_IN query.

        Raises:
            RedisModelError: if ``value`` is a string or is not iterable.
        """
        # BUG FIX: second string part needed the f prefix so {value} is
        # interpolated into the error message.
        err = RedisModelError(f"Using the IN operator requires passing an iterable of "
                              f"possible values. You passed: {value}")
        # BUG FIX: isinstance() arguments were reversed (isinstance(str, value)),
        # so a bare string was never rejected. A string is iterable, but joining
        # it would match each character as a separate tag.
        if isinstance(value, str):
            raise err
        try:
            expanded_value = "|".join(value)
        except TypeError:
            raise err
        return expanded_value

    def resolve_value(self, field_name: str, field_type: RediSearchFieldTypes,
                      op: Operators, value: Any) -> str:
        """Render a single ``field <op> value`` comparison as RediSearch syntax.

        Raises:
            QueryNotSupportedError: for operators a field type doesn't support.
        """
        result = ""
        if field_type is RediSearchFieldTypes.TEXT:
            result = f"@{field_name}:"
            if op is Operators.EQ:
                result += f'"{value}"'
            elif op is Operators.NE:
                # Negate the whole clause with RediSearch's "-" prefix.
                result = f'-({result}"{value}")'
            elif op is Operators.LIKE:
                # LIKE passes the raw value through, enabling RediSearch
                # full-text matching syntax.
                result += value
            else:
                # TODO: Handling TAG, TEXT switch-offs, etc.
                raise QueryNotSupportedError("Only equals (=) and not-equals (!=) comparisons are "
                                             "currently supported for TEXT fields. See docs: TODO")
        elif field_type is RediSearchFieldTypes.NUMERIC:
            if op is Operators.EQ:
                # Equality on NUMERIC fields is a degenerate closed range.
                result += f"@{field_name}:[{value} {value}]"
            elif op is Operators.NE:
                # TODO: Is this enough or do we also need a clause for all values
                # ([-inf +inf]) from which we then subtract the undesirable value?
                result += f"~(@{field_name}:[{value} {value}])"
            elif op is Operators.GT:
                # "(" marks an exclusive bound in RediSearch range syntax.
                result += f"@{field_name}:[({value} +inf]"
            elif op is Operators.LT:
                result += f"@{field_name}:[-inf ({value}]"
            elif op is Operators.GE:
                result += f"@{field_name}:[{value} +inf]"
            elif op is Operators.LE:
                result += f"@{field_name}:[-inf {value}]"
        elif field_type is RediSearchFieldTypes.TAG:
            if op is Operators.EQ:
                result += f"@{field_name}:{{{value}}}"
            elif op is Operators.NE:
                result += f"~(@{field_name}:{{{value}}})"
            elif op is Operators.IN:
                # Multiple TAG values are OR-ed with "|" inside the braces.
                expanded_value = self.expand_tag_value(value)
                result += f"(@{field_name}:{{{expanded_value}}})"
            elif op is Operators.NOT_IN:
                expanded_value = self.expand_tag_value(value)
                result += f"~(@{field_name}:{{{expanded_value}}})"

        return result

    def resolve_redisearch_pagination(self):
        """Resolve pagination options for a query."""
        if not self.limit and not self.offset:
            return []
        offset = self.offset or 0
        limit = self.limit or 10
        return ["LIMIT", offset, limit]

    def resolve_redisearch_query(self, expression: ExpressionOrNegated):
        """Resolve an expression to a string RediSearch query."""
        field_type = None
        field_name = None
        encompassing_expression_is_negated = False
        result = ""

        if isinstance(expression, NegatedExpression):
            # Remember to wrap the final result in "-(...)" and recurse into
            # the wrapped expression.
            encompassing_expression_is_negated = True
            expression = expression.expression

        # The left side is either a nested expression (recurse) or a model
        # field whose type determines how the right side is rendered.
        if isinstance(expression.left, Expression) or \
                isinstance(expression.left, NegatedExpression):
            result += f"({self.resolve_redisearch_query(expression.left)})"
        elif isinstance(expression.left, ModelField):
            field_type = self.resolve_field_type(expression.left)
            field_name = expression.left.name
        else:
            # BUG FIX: removed leftover "import ipdb; ipdb.set_trace()" debug
            # statement that would hang execution (or crash without ipdb).
            raise QueryNotSupportedError(f"A query expression should start with either a field "
                                         f"or an expression enclosed in parenthesis. See docs: "
                                         f"TODO")

        right = expression.right
        right_is_negated = isinstance(right, NegatedExpression)

        if isinstance(right, Expression) or right_is_negated:
            if expression.op == Operators.AND:
                result += " "
            elif expression.op == Operators.OR:
                result += "| "
            else:
                raise QueryNotSupportedError("You can only combine two query expressions with"
                                             "AND (&) or OR (|). See docs: TODO")

            if right_is_negated:
                result += "-"
                # We're handling the RediSearch operator in this call ("-"), so resolve the
                # inner expression instead of the NegatedExpression.
                right = right.expression

            result += f"({self.resolve_redisearch_query(right)})"
        else:
            if isinstance(right, ModelField):
                raise QueryNotSupportedError("Comparing fields is not supported. See docs: TODO")
            else:
                result += self.resolve_value(field_name, field_type, expression.op, right)

        if encompassing_expression_is_negated:
            result = f"-({result})"

        return result

    def find(self):
        """Execute this query with FT.SEARCH and return the raw Redis reply."""
        args = ["ft.search", self.model.Meta.index_name, self.query]
        # TODO: Do we need self.pagination if we're just appending to query anyway?
        if self.pagination:
            args.extend(self.pagination)
        return self.model.db().execute_command(*args)
|
2021-08-31 03:08:07 +02:00
|
|
|
|
|
|
|
|
2021-09-01 01:30:31 +02:00
|
|
|
class PrimaryKeyCreator(Protocol):
    """Structural interface for pluggable primary-key generators."""

    def create_pk(self, *args, **kwargs) -> str:
        """Create a new primary key"""
|
|
|
|
|
|
|
|
|
|
|
|
class Uuid4PrimaryKey:
    """PrimaryKeyCreator that issues random UUID4 strings."""

    def create_pk(self) -> str:
        """Return a fresh UUID4 rendered in its canonical string form."""
        return str(uuid.uuid4())
|
|
|
|
|
|
|
|
|
2021-08-31 03:08:07 +02:00
|
|
|
class ExpressionProxy:
    """Stands in for a model field on the class, turning Python comparison
    operators into Expression nodes (e.g. ``Model.age > 21``)."""

    def __init__(self, field: ModelField):
        self.field = field

    def __eq__(self, other: Any) -> Expression:
        return Expression(op=Operators.EQ, left=self.field, right=other)

    def __ne__(self, other: Any) -> Expression:
        return Expression(op=Operators.NE, left=self.field, right=other)

    def __lt__(self, other: Any) -> Expression:
        return Expression(op=Operators.LT, left=self.field, right=other)

    def __le__(self, other: Any) -> Expression:
        return Expression(op=Operators.LE, left=self.field, right=other)

    def __gt__(self, other: Any) -> Expression:
        return Expression(op=Operators.GT, left=self.field, right=other)

    def __ge__(self, other: Any) -> Expression:
        return Expression(op=Operators.GE, left=self.field, right=other)
|
2021-08-31 03:08:07 +02:00
|
|
|
|
|
|
|
|
|
|
|
def __dataclass_transform__(
    *,
    eq_default: bool = True,
    order_default: bool = False,
    kw_only_default: bool = False,
    field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]:
    """Runtime no-op stand-in for the dataclass_transform decorator.

    The parameters only carry meaning for static type checkers; at runtime
    the decorated object is returned unchanged.
    """
    def identity(decorated: _T) -> _T:
        return decorated
    return identity
|
|
|
|
|
|
|
|
|
|
|
|
class FieldInfo(PydanticFieldInfo):
    """Pydantic FieldInfo extended with Redis indexing metadata
    (primary_key, sortable, index, full_text_search)."""

    def __init__(self, default: Any = Undefined, **kwargs: Any) -> None:
        # Remove our extra options from kwargs before pydantic validates them.
        primary_key = kwargs.pop("primary_key", False)
        sortable = kwargs.pop("sortable", Undefined)
        index = kwargs.pop("index", Undefined)
        full_text_search = kwargs.pop("full_text_search", Undefined)
        super().__init__(default=default, **kwargs)
        self.primary_key = primary_key
        self.sortable = sortable
        self.index = index
        self.full_text_search = full_text_search
|
2021-08-31 03:08:07 +02:00
|
|
|
|
|
|
|
|
|
|
|
class RelationshipInfo(Representation):
    """Holds configuration for a relationship between models."""

    def __init__(
        self,
        *,
        back_populates: Optional[str] = None,
        link_model: Optional[Any] = None,
    ) -> None:
        self.back_populates = back_populates
        self.link_model = link_model
|
|
|
|
|
|
|
|
|
|
|
|
def Field(
    default: Any = Undefined,
    *,
    default_factory: Optional[NoArgAnyCallable] = None,
    alias: str = None,
    title: str = None,
    description: str = None,
    exclude: Union[
        AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any
    ] = None,
    include: Union[
        AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any
    ] = None,
    const: bool = None,
    gt: float = None,
    ge: float = None,
    lt: float = None,
    le: float = None,
    multiple_of: float = None,
    min_items: int = None,
    max_items: int = None,
    min_length: int = None,
    max_length: int = None,
    allow_mutation: bool = True,
    regex: str = None,
    primary_key: bool = False,
    sortable: Union[bool, UndefinedType] = Undefined,
    index: Union[bool, UndefinedType] = Undefined,
    full_text_search: Union[bool, UndefinedType] = Undefined,
    schema_extra: Optional[Dict[str, Any]] = None,
) -> Any:
    """Declare a model field.

    Mirrors pydantic's ``Field()`` factory and adds the Redis-specific
    options ``primary_key``, ``sortable``, ``index``, and
    ``full_text_search``, which are stored on this module's FieldInfo
    subclass.

    Returns:
        A validated FieldInfo instance (typed ``Any`` so assignments to
        annotated model attributes type-check).
    """
    current_schema_extra = schema_extra or {}
    field_info = FieldInfo(
        default,
        default_factory=default_factory,
        alias=alias,
        title=title,
        description=description,
        exclude=exclude,
        include=include,
        const=const,
        gt=gt,
        ge=ge,
        lt=lt,
        le=le,
        multiple_of=multiple_of,
        min_items=min_items,
        max_items=max_items,
        min_length=min_length,
        max_length=max_length,
        allow_mutation=allow_mutation,
        regex=regex,
        primary_key=primary_key,
        sortable=sortable,
        index=index,
        full_text_search=full_text_search,
        **current_schema_extra,
    )
    # Let pydantic check constraint combinations (e.g. gt with max_items).
    field_info._validate()
    return field_info
|
|
|
|
|
|
|
|
|
|
|
|
@dataclass
class PrimaryKey:
    """Records which model field acts as the primary key."""
    # Attribute name of the primary-key field on the model.
    name: str
    # The pydantic ModelField object for that attribute.
    field: ModelField
|
|
|
|
|
|
|
|
|
|
|
|
class DefaultMeta:
    """Default Meta options; ModelMeta deep-copies this onto model classes
    that do not supply their own Meta, then fills in derived values."""
    # Prefix prepended to every Redis key this model writes.
    global_key_prefix: Optional[str] = None
    # Per-model key prefix; defaults to "<module>.<class name>" (set by ModelMeta).
    model_key_prefix: Optional[str] = None
    # Format string used to build the key suffix from the primary key, e.g. "{pk}".
    primary_key_pattern: Optional[str] = None
    # Redis client; ModelMeta falls back to redis.Redis(decode_responses=True).
    database: Optional[redis.Redis] = None
    # Set by ModelMeta when a field is declared with primary_key=True.
    primary_key: Optional[PrimaryKey] = None
    # Annotation fixed: defaults to None, so it must be Optional.
    primary_key_creator_cls: Optional[Type[PrimaryKeyCreator]] = None
    # Annotation fixed: defaults to None, so it must be Optional.
    index_name: Optional[str] = None
    abstract: bool = False
|
|
|
|
|
|
|
|
|
|
|
|
class ModelMeta(ModelMetaclass):
    """Metaclass that wires Meta options, field proxies, and registry entries
    onto every RedisModel subclass."""

    def __new__(cls, name, bases, attrs, **kwargs):  # noqa C901
        """Build the model class, then resolve its Meta configuration.

        Order matters here: Meta resolution happens before field processing
        so that primary-key detection can write into the resolved _meta.
        """
        meta = attrs.pop('Meta', None)
        new_class = super().__new__(cls, name, bases, attrs, **kwargs)

        # Prefer an explicitly declared Meta; fall back to one inherited via
        # the MRO, and track the parent's resolved _meta separately.
        meta = meta or getattr(new_class, 'Meta', None)
        base_meta = getattr(new_class, '_meta', None)

        if meta and meta != DefaultMeta and meta != base_meta:
            # This class declared its own Meta: use it directly.
            new_class.Meta = meta
            new_class._meta = meta
        elif base_meta:
            # Inherit the parent's Meta as a deep copy so mutations below
            # don't leak back into the parent class.
            new_class._meta = deepcopy(base_meta)
            new_class.Meta = new_class._meta
            # Unset inherited values we don't want to reuse (typically based on the model name).
            new_class._meta.abstract = False
            new_class._meta.model_key_prefix = None
            new_class._meta.index_name = None
        else:
            # No Meta anywhere in the hierarchy: start from the defaults.
            new_class._meta = deepcopy(DefaultMeta)
            new_class.Meta = new_class._meta

        # Not an abstract model class
        if abc.ABC not in bases:
            key = f"{new_class.__module__}.{new_class.__name__}"
            model_registry[key] = new_class

        # Create proxies for each model field so that we can use the field
        # in queries, like Model.get(Model.field_name == 1)
        for field_name, field in new_class.__fields__.items():
            setattr(new_class, field_name, ExpressionProxy(field))
            # Check if this is our FieldInfo version with extended ORM metadata.
            if isinstance(field.field_info, FieldInfo):
                if field.field_info.primary_key:
                    new_class._meta.primary_key = PrimaryKey(name=field_name, field=field)

        # Fill in any Meta values the class (or its Meta) left unset.
        if not getattr(new_class._meta, 'global_key_prefix', None):
            new_class._meta.global_key_prefix = getattr(base_meta, "global_key_prefix", "")
        if not getattr(new_class._meta, 'model_key_prefix', None):
            # Don't look at the base class for this.
            new_class._meta.model_key_prefix = f"{new_class.__module__}.{new_class.__name__}"
        if not getattr(new_class._meta, 'primary_key_pattern', None):
            new_class._meta.primary_key_pattern = getattr(base_meta, "primary_key_pattern",
                                                          "{pk}")
        if not getattr(new_class._meta, 'database', None):
            new_class._meta.database = getattr(base_meta, "database",
                                               redis.Redis(decode_responses=True))
        if not getattr(new_class._meta, 'primary_key_creator_cls', None):
            new_class._meta.primary_key_creator_cls = getattr(base_meta, "primary_key_creator_cls",
                                                              Uuid4PrimaryKey)
        if not getattr(new_class._meta, 'index_name', None):
            new_class._meta.index_name = f"{new_class._meta.global_key_prefix}:" \
                                         f"{new_class._meta.model_key_prefix}:index"

        return new_class
|
|
|
|
|
|
|
|
|
|
|
|
class RedisModel(BaseModel, abc.ABC, metaclass=ModelMeta):
    """Abstract base class for all Redis-backed models.

    Concrete storage behavior (save/get/schema) lives in the HashModel and
    JsonModel subclasses; this class provides keys, primary-key handling,
    and querying.
    """
    # Every model gets a primary key; filled by validate_pk when unset.
    pk: Optional[str] = Field(default=None, primary_key=True)

    Meta = DefaultMeta

    class Config:
        orm_mode = True
        arbitrary_types_allowed = True
        extra = 'allow'

    def __init__(__pydantic_self__, **data: Any) -> None:
        super().__init__(**data)
        # Fail fast at instantiation time if the model declared zero or
        # multiple primary keys.
        __pydantic_self__.validate_primary_key()

    def __lt__(self, other):
        """Order models by majority vote across the fields both share.

        Returns True when more than half of the shared fields on ``self``
        compare less-than the corresponding fields on ``other``.
        """
        my_keys = set(self.__fields__.keys())
        other_keys = set(other.__fields__.keys())
        shared_keys = list(my_keys & other_keys)
        lt = [getattr(self, k) < getattr(other, k) for k in shared_keys]
        # BUG FIX: was ``len(lt) > len(shared_keys) / 2``. len(lt) always
        # equals len(shared_keys), so the old comparison ignored the field
        # values entirely; sum(lt) counts the True comparisons.
        return sum(lt) > len(shared_keys) / 2

    @validator("pk", always=True)
    def validate_pk(cls, v):
        # Generate a primary key when the caller didn't supply one.
        if not v:
            v = cls._meta.primary_key_creator_cls().create_pk()
        return v

    @classmethod
    def validate_primary_key(cls):
        """Check for a primary key. We need one (and only one)."""
        primary_keys = 0
        for name, field in cls.__fields__.items():
            if getattr(field.field_info, 'primary_key', None):
                primary_keys += 1
        if primary_keys == 0:
            raise RedisModelError("You must define a primary key for the model")
        elif primary_keys > 1:
            raise RedisModelError("You must define only one primary key for a model")

    @classmethod
    def make_key(cls, part: str):
        """Build a namespaced Redis key: "<global prefix>:<model prefix>:<part>"."""
        global_prefix = getattr(cls._meta, 'global_key_prefix', '').strip(":")
        model_prefix = getattr(cls._meta, 'model_key_prefix', '').strip(":")
        return f"{global_prefix}:{model_prefix}:{part}"

    @classmethod
    def make_primary_key(cls, pk: Any):
        """Return the Redis key for this model."""
        return cls.make_key(cls._meta.primary_key_pattern.format(pk=pk))

    def key(self):
        """Return the Redis key for this model."""
        pk = getattr(self, self._meta.primary_key.field.name)
        return self.make_primary_key(pk)

    @classmethod
    def db(cls):
        """Return the Redis client configured for this model."""
        return cls._meta.database

    @classmethod
    def from_redis(cls, res: Any):
        """Parse a raw FT.SEARCH reply into a list of model instances.

        The reply layout is [total, id1, fields1, id2, fields2, ...], where
        each fields entry is a flat [name, value, name, value, ...] list.
        """
        # IMPROVEMENT: dropped the third-party six/xrange/izip compatibility
        # shims; on Python 3 the builtins behave identically.
        def to_string(s):
            if isinstance(s, str):
                return s
            elif isinstance(s, bytes):
                return s.decode('utf-8', 'ignore')
            else:
                return s  # Not a string we care about

        docs = []
        step = 2  # Because the result has content
        offset = 1

        for i in range(1, len(res), step):
            fields_offset = offset

            # Pair up the alternating name/value entries into a dict.
            fields = dict(
                zip(map(to_string, res[i + fields_offset][::2]),
                    map(to_string, res[i + fields_offset][1::2]))
            )

            # The document ID is not a model field; drop it if present.
            fields.pop('id', None)

            doc = cls(**fields)
            docs.append(doc)
        return docs

    @classmethod
    def find(cls, *expressions: Expression):
        """Run a RediSearch query and return matching model instances."""
        query = FindQuery(expressions=expressions, model=cls)
        raw_result = query.find()
        return cls.from_redis(raw_result)

    @classmethod
    def find_one(cls, *expressions: Expression):
        """Run a RediSearch query limited to one result and return it.

        NOTE(review): raises IndexError when nothing matches — confirm
        whether NotFoundError was intended here.
        """
        query = FindQuery(expressions=expressions, model=cls, limit=1, offset=0)
        raw_result = query.find()
        return cls.from_redis(raw_result)[0]

    @classmethod
    def add(cls, models: Sequence['RedisModel']) -> Sequence['RedisModel']:
        """Save each model and return the save() results."""
        return [model.save() for model in models]

    @classmethod
    def update(cls, **field_values):
        # TODO: not implemented yet; currently returns the class unchanged.
        return cls

    @classmethod
    def values(cls):
        """Return raw values from Redis instead of model instances."""
        return cls

    def delete(self):
        """Delete this model's key from Redis."""
        return self.db().delete(self.key())

    def save(self, *args, **kwargs) -> 'RedisModel':
        """Persist the model; implemented by storage-specific subclasses."""
        raise NotImplementedError

    @classmethod
    def schema(cls):
        """Return the RediSearch schema; implemented by subclasses."""
        raise NotImplementedError
|
|
|
|
|
|
|
|
|
2021-09-16 02:41:45 +02:00
|
|
|
class HashModel(RedisModel, abc.ABC):
    """A model stored as a Redis hash: one field per hash entry."""

    def __init_subclass__(cls, **kwargs):
        """Reject field types a flat Redis hash cannot represent."""
        super().__init_subclass__(**kwargs)

        for name, field in cls.__fields__.items():
            # Hashes are flat string maps, so nested models can't be stored.
            if issubclass(field.outer_type_, RedisModel):
                raise RedisModelError(f"HashModels cannot have embedded model "
                                      f"fields. Field: {name}")

            # Container types can't be flattened into a single hash value.
            for typ in (Set, Mapping, List):
                if issubclass(field.outer_type_, typ):
                    raise RedisModelError(f"HashModels cannot have set, list,"
                                          f" or mapping fields. Field: {name}")

    def save(self, *args, **kwargs) -> 'HashModel':
        """Write all fields to this model's hash key with HSET."""
        # jsonable_encoder converts field values to Redis-safe primitives.
        document = jsonable_encoder(self.dict())
        success = self.db().hset(self.key(), mapping=document)

        return success

    @classmethod
    def get(cls, pk: Any) -> 'HashModel':
        """Load the model with the given primary key.

        Raises:
            NotFoundError: if no hash exists at the derived key.
        """
        document = cls.db().hgetall(cls.make_primary_key(pk))
        if not document:
            raise NotFoundError
        return cls.parse_obj(document)

    @classmethod
    @no_type_check
    def _get_value(cls, *args, **kwargs) -> Any:
        """
        Always send None as an empty string.

        TODO: We do this because redis-py's hset() method requires non-null
        values. Is there a better way?
        """
        val = super()._get_value(*args, **kwargs)
        if val is None:
            return ""
        return val

    @classmethod
    def schema_for_type(cls, name, typ: Type, field_info: FieldInfo):
        """Render one field's RediSearch schema fragment based on its type."""
        if any(issubclass(typ, t) for t in NUMERIC_TYPES):
            return f"{name} NUMERIC"
        elif issubclass(typ, str):
            if getattr(field_info, 'full_text_search', False) is True:
                # Index twice: exact-match TAG plus a "<name>_fts" TEXT field.
                return f"{name} TAG {name}_fts TEXT"
            else:
                return f"{name} TAG"
        else:
            # Everything else falls back to an exact-match TAG field.
            return f"{name} TAG"

    @classmethod
    def schema(cls):
        """Build the FT.CREATE argument string for this model's index."""
        # Index every hash whose key starts with this model's key prefix.
        hash_prefix = cls.make_key(cls._meta.primary_key_pattern.format(pk=""))
        schema_prefix = f"ON HASH PREFIX 1 {hash_prefix} SCHEMA"
        schema_parts = [schema_prefix]
        for name, field in cls.__fields__.items():
            _type = field.outer_type_
            if getattr(field.field_info, 'primary_key', None):
                # Primary keys are always indexed; string PKs as plain TAG.
                if issubclass(_type, str):
                    redisearch_field = f"{name} TAG"
                else:
                    redisearch_field = cls.schema_for_type(name, _type, field.field_info)
                schema_parts.append(redisearch_field)
            elif getattr(field.field_info, 'index', None) is True:
                schema_parts.append(cls.schema_for_type(name, _type, field.field_info))
                if getattr(field.field_info, 'sortable', False) is True:
                    schema_parts.append("SORTABLE")
        return " ".join(schema_parts)
|
|
|
|
|
|
|
|
|
|
|
|
class JsonModel(RedisModel, abc.ABC):
    """A model stored as a RedisJSON document."""

    def save(self, *args, **kwargs) -> 'JsonModel':
        """Serialize this model to JSON and store it at its key with JSON.SET."""
        success = self.db().execute_command('JSON.SET', self.key(), ".", self.json())
        return success

    @classmethod
    def get(cls, pk: Any) -> 'JsonModel':
        """Load the model with the given primary key via JSON.GET.

        Raises:
            NotFoundError: if no document exists at the derived key.
        """
        raw_document = cls.db().execute_command("JSON.GET", cls.make_primary_key(pk))
        if not raw_document:
            raise NotFoundError
        return cls.parse_raw(raw_document)
|