WIP on basic non-relational model functionality
This commit is contained in:
commit
ccad3de32d
11 changed files with 1666 additions and 0 deletions
0
redis_developer/__init__.py
Normal file
0
redis_developer/__init__.py
Normal file
5
redis_developer/orm/__init__.py
Normal file
5
redis_developer/orm/__init__.py
Normal file
|
@ -0,0 +1,5 @@
|
|||
from .model import (
|
||||
RedisModel,
|
||||
Relationship,
|
||||
Field
|
||||
)
|
5
redis_developer/orm/connections.py
Normal file
5
redis_developer/orm/connections.py
Normal file
|
@ -0,0 +1,5 @@
|
|||
import redis
|
||||
|
||||
|
||||
def get_redis_connection() -> redis.Redis:
    """Return a Redis client using the library defaults (localhost:6379, db 0)."""
    return redis.Redis()
|
179
redis_developer/orm/encoders.py
Normal file
179
redis_developer/orm/encoders.py
Normal file
|
@ -0,0 +1,179 @@
|
|||
"""
|
||||
This file adapted from FastAPI's encoders.
|
||||
|
||||
Licensed under the MIT License (MIT).
|
||||
|
||||
Copyright (c) 2018 Sebastián Ramírez
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
"""
|
||||
|
||||
import dataclasses
|
||||
from collections import defaultdict
|
||||
from enum import Enum
|
||||
from pathlib import PurePath
|
||||
from types import GeneratorType
|
||||
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
|
||||
|
||||
from pydantic import BaseModel
|
||||
from pydantic.json import ENCODERS_BY_TYPE
|
||||
|
||||
SetIntStr = Set[Union[int, str]]
|
||||
DictIntStrAny = Dict[Union[int, str], Any]
|
||||
|
||||
|
||||
def generate_encoders_by_class_tuples(
    type_encoder_map: Dict[Any, Callable[[Any], Any]]
) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]:
    """Invert a ``{type: encoder}`` map into ``{encoder: (types, ...)}``.

    The inverted form lets callers do a single ``isinstance(obj, classes_tuple)``
    check per encoder instead of probing each type individually.
    """
    inverted: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple)
    for klass, encode_fn in type_encoder_map.items():
        inverted[encode_fn] = inverted[encode_fn] + (klass,)
    return inverted
|
||||
|
||||
|
||||
# Precomputed inverse of pydantic's ENCODERS_BY_TYPE, used below for
# isinstance-based encoder lookup when an exact type() match fails.
encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE)
|
||||
|
||||
|
||||
def jsonable_encoder(
    obj: Any,
    include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    by_alias: bool = True,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None,
    sqlalchemy_safe: bool = True,
) -> Any:
    """Recursively convert *obj* into JSON-compatible builtins.

    Adapted from FastAPI's ``jsonable_encoder``.  Handles pydantic models,
    dataclasses, enums, paths, dicts, sequences, and anything registered in
    pydantic's ``ENCODERS_BY_TYPE``; falls back to ``dict(obj)`` / ``vars(obj)``.

    :param include/exclude: field names (or nested maps) to keep/drop.
    :param custom_encoder: extra ``{type: callable}`` encoders; checked before
        the pydantic defaults.  Defaults to ``None`` (was a shared mutable
        ``{}`` default, which is an anti-pattern).
    :param sqlalchemy_safe: skip dict keys starting with ``"_sa"`` so
        SQLAlchemy instance state is not serialized.
    :raises ValueError: if *obj* supports neither ``dict()`` nor ``vars()``.
    """
    custom_encoder = custom_encoder or {}
    if include is not None and not isinstance(include, (set, dict)):
        include = set(include)
    if exclude is not None and not isinstance(exclude, (set, dict)):
        exclude = set(exclude)
    if isinstance(obj, BaseModel):
        # Copy before updating: mutating the dict returned by getattr would
        # permanently alter the model class's Config.json_encoders.
        encoder = dict(getattr(obj.__config__, "json_encoders", {}))
        if custom_encoder:
            encoder.update(custom_encoder)
        obj_dict = obj.dict(
            include=include,  # type: ignore # in Pydantic
            exclude=exclude,  # type: ignore # in Pydantic
            by_alias=by_alias,
            exclude_unset=exclude_unset,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
        )
        if "__root__" in obj_dict:
            obj_dict = obj_dict["__root__"]
        return jsonable_encoder(
            obj_dict,
            exclude_none=exclude_none,
            exclude_defaults=exclude_defaults,
            custom_encoder=encoder,
            sqlalchemy_safe=sqlalchemy_safe,
        )
    if dataclasses.is_dataclass(obj):
        return dataclasses.asdict(obj)
    if isinstance(obj, Enum):
        return obj.value
    if isinstance(obj, PurePath):
        return str(obj)
    if isinstance(obj, (str, int, float, type(None))):
        return obj
    if isinstance(obj, dict):
        encoded_dict = {}
        for key, value in obj.items():
            if (
                (
                    not sqlalchemy_safe
                    or (not isinstance(key, str))
                    or (not key.startswith("_sa"))
                )
                and (value is not None or not exclude_none)
                and ((include and key in include) or not exclude or key not in exclude)
            ):
                encoded_key = jsonable_encoder(
                    key,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_value = jsonable_encoder(
                    value,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
                encoded_dict[encoded_key] = encoded_value
        return encoded_dict
    if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)):
        encoded_list = []
        for item in obj:
            encoded_list.append(
                jsonable_encoder(
                    item,
                    include=include,
                    exclude=exclude,
                    by_alias=by_alias,
                    exclude_unset=exclude_unset,
                    exclude_defaults=exclude_defaults,
                    exclude_none=exclude_none,
                    custom_encoder=custom_encoder,
                    sqlalchemy_safe=sqlalchemy_safe,
                )
            )
        return encoded_list

    if custom_encoder:
        if type(obj) in custom_encoder:
            return custom_encoder[type(obj)](obj)
        else:
            for encoder_type, encoder in custom_encoder.items():
                if isinstance(obj, encoder_type):
                    return encoder(obj)

    if type(obj) in ENCODERS_BY_TYPE:
        return ENCODERS_BY_TYPE[type(obj)](obj)
    for encoder, classes_tuple in encoders_by_class_tuples.items():
        if isinstance(obj, classes_tuple):
            return encoder(obj)

    # Last resort: treat the object as a mapping, then as a plain namespace.
    errors: List[Exception] = []
    try:
        data = dict(obj)
    except Exception as e:
        errors.append(e)
        try:
            data = vars(obj)
        except Exception as e:
            errors.append(e)
            raise ValueError(errors)
    return jsonable_encoder(
        data,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        custom_encoder=custom_encoder,
        sqlalchemy_safe=sqlalchemy_safe,
    )
|
390
redis_developer/orm/model.py
Normal file
390
redis_developer/orm/model.py
Normal file
|
@ -0,0 +1,390 @@
|
|||
import datetime
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import (
|
||||
AbstractSet,
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Mapping,
|
||||
Optional,
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
Sequence, ClassVar, TYPE_CHECKING, no_type_check,
|
||||
)
|
||||
|
||||
import redis
|
||||
from pydantic import BaseModel
|
||||
from pydantic.fields import FieldInfo as PydanticFieldInfo
|
||||
from pydantic.fields import ModelField, Undefined, UndefinedType
|
||||
from pydantic.main import BaseConfig, ModelMetaclass, validate_model
|
||||
from pydantic.typing import NoArgAnyCallable, resolve_annotations
|
||||
from pydantic.utils import Representation
|
||||
|
||||
from .encoders import jsonable_encoder
|
||||
from .util import uuid_from_time
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
class RedisModelError(Exception):
    """Raised when a model is declared or configured incorrectly."""
|
||||
|
||||
|
||||
class NotFoundError(Exception):
    """Raised when a requested model instance does not exist in Redis."""
|
||||
|
||||
|
||||
class Operations(Enum):
    """Comparison operators that query expressions can capture."""

    EQ = 1
    LT = 2
    GT = 3
|
||||
|
||||
|
||||
@dataclass
class Expression:
    """One captured comparison: ``field <op> right_value``."""

    field: ModelField  # the pydantic field being compared
    op: Operations
    right_value: Any
|
||||
|
||||
|
||||
class ExpressionProxy:
    """Stands in for a model field on the class so that comparison operators
    (``Model.field == x``, ``<``, ``>``) build :class:`Expression` objects
    instead of evaluating immediately."""

    def __init__(self, field: ModelField):
        self.field = field

    def __eq__(self, other: Any) -> Expression:  # type: ignore[override]
        return Expression(field=self.field, op=Operations.EQ, right_value=other)

    def __lt__(self, other: Any) -> Expression:
        return Expression(field=self.field, op=Operations.LT, right_value=other)

    def __gt__(self, other: Any) -> Expression:
        return Expression(field=self.field, op=Operations.GT, right_value=other)
|
||||
|
||||
|
||||
def __dataclass_transform__(
    *,
    eq_default: bool = True,
    order_default: bool = False,
    kw_only_default: bool = False,
    field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (),
) -> Callable[[_T], _T]:
    """Runtime no-op decorator factory.

    Exists only so that type checkers recognizing the (pre-PEP 681)
    ``__dataclass_transform__`` convention treat decorated classes as
    dataclass-like; the decorated class is returned unchanged.
    """
    def identity(cls: _T) -> _T:
        return cls

    return identity
|
||||
|
||||
|
||||
class FieldInfo(PydanticFieldInfo):
    """Pydantic ``FieldInfo`` extended with Redis ORM metadata.

    Extra options: ``primary_key``, ``nullable``, ``foreign_key``, ``index``,
    ``unique``.  They are stripped from ``kwargs`` before delegating to
    pydantic so its constructor never sees them.
    """

    def __init__(self, default: Any = Undefined, **kwargs: Any) -> None:
        primary_key = kwargs.pop("primary_key", False)
        # The remaining ORM-only options all share the Undefined sentinel.
        orm_opts = {
            name: kwargs.pop(name, Undefined)
            for name in ("nullable", "foreign_key", "index", "unique")
        }
        super().__init__(default=default, **kwargs)
        self.primary_key = primary_key
        self.nullable = orm_opts["nullable"]
        self.foreign_key = orm_opts["foreign_key"]
        self.index = orm_opts["index"]
        self.unique = orm_opts["unique"]
|
||||
|
||||
|
||||
class RelationshipInfo(Representation):
    """Metadata describing a link between two models.

    NOTE(review): relationships are collected by the metaclass but not yet
    acted on anywhere in this file — placeholder for future relation support.
    """

    def __init__(
        self,
        *,
        back_populates: Optional[str] = None,
        link_model: Optional[Any] = None,
    ) -> None:
        self.back_populates = back_populates
        self.link_model = link_model
|
||||
|
||||
|
||||
def Field(
    default: Any = Undefined,
    *,
    default_factory: Optional[NoArgAnyCallable] = None,
    alias: str = None,
    title: str = None,
    description: str = None,
    exclude: Union[
        AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any
    ] = None,
    include: Union[
        AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], Any
    ] = None,
    const: bool = None,
    gt: float = None,
    ge: float = None,
    lt: float = None,
    le: float = None,
    multiple_of: float = None,
    min_items: int = None,
    max_items: int = None,
    min_length: int = None,
    max_length: int = None,
    allow_mutation: bool = True,
    regex: str = None,
    primary_key: bool = False,
    unique: bool = False,
    foreign_key: Optional[Any] = None,
    nullable: Union[bool, UndefinedType] = Undefined,
    index: Union[bool, UndefinedType] = Undefined,
    schema_extra: Optional[Dict[str, Any]] = None,
) -> Any:
    """Declare a model field.

    Mirrors pydantic's ``Field`` and adds the Redis ORM options
    (``primary_key``, ``unique``, ``foreign_key``, ``nullable``, ``index``).
    Returns a validated :class:`FieldInfo`.
    """
    extra = schema_extra or {}
    info = FieldInfo(
        default,
        default_factory=default_factory,
        alias=alias,
        title=title,
        description=description,
        exclude=exclude,
        include=include,
        const=const,
        gt=gt,
        ge=ge,
        lt=lt,
        le=le,
        multiple_of=multiple_of,
        min_items=min_items,
        max_items=max_items,
        min_length=min_length,
        max_length=max_length,
        allow_mutation=allow_mutation,
        regex=regex,
        primary_key=primary_key,
        unique=unique,
        foreign_key=foreign_key,
        nullable=nullable,
        index=index,
        **extra,
    )
    # Let pydantic reject contradictory constraint combinations up front.
    info._validate()
    return info
|
||||
|
||||
|
||||
def Relationship(
    *,
    back_populates: Optional[str] = None,
    link_model: Optional[Any] = None
) -> Any:
    """Declare a relationship attribute on a model.

    The returned :class:`RelationshipInfo` is recognized and set aside by
    :class:`RedisModelMetaclass` rather than treated as a pydantic field.
    """
    return RelationshipInfo(
        back_populates=back_populates,
        link_model=link_model,
    )
|
||||
|
||||
|
||||
@__dataclass_transform__(kw_only_default=True, field_descriptors=(Field, FieldInfo))
class RedisModelMetaclass(ModelMetaclass):
    """Metaclass that separates relationship declarations from pydantic fields.

    ``RelationshipInfo`` attributes (and their annotations) are removed from
    what pydantic sees, stored on ``__redismodel_relationships__``, and the
    full annotation set is restored on the finished class afterwards.
    """

    __redismodel_relationships__: Dict[str, RelationshipInfo]
    __config__: Type[BaseConfig]
    __fields__: Dict[str, ModelField]

    # Adapted from Pydantic's ModelMetaclass.
    def __new__(cls, name, bases, class_dict: dict, **kwargs) -> Any:
        relationships: Dict[str, RelationshipInfo] = {}
        dict_for_pydantic = {}
        original_annotations = resolve_annotations(
            class_dict.get("__annotations__", {}), class_dict.get("__module__", None)
        )
        pydantic_annotations = {}
        relationship_annotations = {}

        # Relationship values are ours; everything else goes to pydantic.
        for attr_name, attr_value in class_dict.items():
            if isinstance(attr_value, RelationshipInfo):
                relationships[attr_name] = attr_value
            else:
                dict_for_pydantic[attr_name] = attr_value
        # Split the annotations the same way.
        for attr_name, annotation in original_annotations.items():
            if attr_name in relationships:
                relationship_annotations[attr_name] = annotation
            else:
                pydantic_annotations[attr_name] = annotation

        dict_used = {
            **dict_for_pydantic,
            "__weakref__": None,
            "__redismodel_relationships__": relationships,
            "__annotations__": pydantic_annotations,
        }
        # Duplicate logic from Pydantic to filter config kwargs because if they
        # are passed directly Pydantic will pass them over to the superclass
        # causing an error.
        allowed_config_kwargs: Set[str] = {
            key
            for key in dir(BaseConfig)
            if not (
                key.startswith("__") and key.endswith("__")
            )  # skip dunder methods and attributes
        }
        pydantic_kwargs = kwargs.copy()
        config_kwargs = {
            key: pydantic_kwargs.pop(key)
            for key in pydantic_kwargs.keys() & allowed_config_kwargs
        }
        new_cls = super().__new__(cls, name, bases, dict_used, **config_kwargs)
        # Put the relationship annotations back so the class's annotation map
        # is complete; existing entries on new_cls win.
        new_cls.__annotations__ = {
            **relationship_annotations,
            **pydantic_annotations,
            **new_cls.__annotations__,
        }
        return new_cls
|
||||
|
||||
|
||||
@dataclass
class PrimaryKey:
    """The field selected as a model's primary key: its name and pydantic field."""

    name: str
    field: ModelField
|
||||
|
||||
|
||||
class DefaultMeta:
    """Default per-model settings; models override these via an inner ``Meta``."""

    # Prefix prepended to every key written by this application.
    global_key_prefix: Optional[str] = None
    # Prefix prepended to keys belonging to this model.
    model_key_prefix: Optional[str] = None
    # Format string (with a {pk} placeholder) for the key's primary-key part.
    primary_key_pattern: Optional[str] = None
    # Redis client used for all reads/writes of this model.
    database: Optional[redis.Redis] = None
    # Filled in by RedisModel.__init_subclass__ from the field marked primary_key.
    primary_key: Optional[PrimaryKey] = None
|
||||
|
||||
|
||||
class RedisModel(BaseModel, metaclass=RedisModelMetaclass):
    """Base class for models persisted as Redis hashes.

    Fixes over the draft version:
    - Removed the duplicate ``__init__``/``Meta`` at the bottom of the class,
      which silently overrode the validating ``__init__`` and made the
      primary-key check dead code.
    - ``__init__`` now records ``__fields_set__`` like pydantic's
      ``BaseModel.__init__`` does; without it, ``dict(exclude_unset=True)``
      and related machinery break.
    - ``__init_subclass__`` checks ``primary_key_pattern`` for ``None``
      instead of ``hasattr`` (DefaultMeta declares the attribute as None, so
      ``hasattr`` was always true and the default pattern was never built).
    - ``delete()`` reads the primary-key *value* via ``getattr``;
      ``self.__fields__[...]`` returns a ModelField object, not the value.
    - ``validate_primary_key`` tolerates plain pydantic FieldInfo objects,
      which have no ``primary_key`` attribute.
    - ``save()`` returns ``self`` as its annotation promises, so ``add()``
      really yields a sequence of models.

    TODO: Convert expressions to Redis commands, execute
    TODO: Key prefix vs. "key pattern" (that's actually the primary key pattern)
    TODO: Default key prefix is model name lowercase
    TODO: Build primary key pattern from PK field name, model prefix
    TODO: Default PK pattern is model name:pk field
    """
    pk: Optional[str] = Field(default=None, primary_key=True)

    class Config:
        orm_mode = True
        arbitrary_types_allowed = True
        extra = 'allow'

    Meta = DefaultMeta

    def __init_subclass__(cls, **kwargs):
        # Create proxies for each model field so that we can use the field
        # in queries, like Model.get(Model.field_name == 1)
        super().__init_subclass__(**kwargs)

        for name, field in cls.__fields__.items():
            setattr(cls, name, ExpressionProxy(field))
            # Check if this is our FieldInfo version with extended ORM metadata.
            if isinstance(field.field_info, FieldInfo):
                if field.field_info.primary_key:
                    # NOTE(review): subclasses without their own Meta share
                    # DefaultMeta, so this mutates shared state — confirm
                    # whether each model should get its own Meta copy.
                    cls.Meta.primary_key = PrimaryKey(name=name, field=field)
                    # DefaultMeta declares primary_key_pattern (as None), so a
                    # hasattr() check is always true; test for None instead.
                    if getattr(cls.Meta, 'primary_key_pattern', None) is None:
                        cls.Meta.primary_key_pattern = (
                            f"{cls.Meta.primary_key.name}:{{pk}}"
                        )

    def __init__(__pydantic_self__, **data: Any) -> None:
        """Validate input data and the model's primary-key declaration."""
        # Uses something other than `self` as the first arg to allow "self" as
        # a settable attribute.
        if TYPE_CHECKING:
            __pydantic_self__.__dict__: Dict[str, Any] = {}
            __pydantic_self__.__fields_set__: Set[str] = set()

        values, fields_set, validation_error = validate_model(
            __pydantic_self__.__class__, data
        )

        if validation_error:
            raise validation_error

        __pydantic_self__.validate_primary_key()

        object.__setattr__(__pydantic_self__, '__dict__', values)
        # Mirror pydantic's BaseModel.__init__: remember which fields were
        # explicitly provided so exclude_unset and friends work.
        object.__setattr__(__pydantic_self__, '__fields_set__', fields_set)

    @classmethod
    @no_type_check
    def _get_value(cls, *args, **kwargs) -> Any:
        """Always send None as an empty string.

        TODO: How broken is this?
        """
        val = super()._get_value(*args, **kwargs)
        if val is None:
            return ""
        return val

    @classmethod
    def validate_primary_key(cls):
        """Check for a primary key. We need one (and only one)."""
        primary_keys = 0
        for name, field in cls.__fields__.items():
            # Plain pydantic FieldInfo has no primary_key attribute, so use a
            # defaulting getattr rather than direct access.
            if getattr(field.field_info, 'primary_key', False):
                primary_keys += 1

        # TODO: Automatically create a primary key field instead?
        if primary_keys == 0:
            raise RedisModelError("You must define a primary key for the model")
        elif primary_keys > 1:
            raise RedisModelError("You must define only one primary key for a model")

    @classmethod
    def key(cls, part: str):
        """Build the full Redis key for *part* from the configured prefixes."""
        global_prefix = getattr(cls.Meta, 'global_key_prefix', '')
        model_prefix = getattr(cls.Meta, 'model_key_prefix', '')
        return f"{global_prefix}{model_prefix}{part}"

    @classmethod
    def get(cls, pk: Any):
        """Fetch the hash stored under *pk* and parse it into a model.

        :raises NotFoundError: if no hash exists at the computed key.
        """
        # TODO: Getting related objects
        pk_pattern = cls.Meta.primary_key_pattern.format(pk=str(pk))
        print("GET ", cls.key(pk_pattern))
        document = cls.db().hgetall(cls.key(pk_pattern))
        if not document:
            raise NotFoundError
        return cls.parse_obj(document)

    def delete(self):
        """Delete this instance's hash; returns the number of keys removed."""
        # TODO: deleting relationships
        # Read the primary-key VALUE from the instance; self.__fields__ maps
        # names to ModelField objects, not to values.
        pk = getattr(self, self.Meta.primary_key.field.name)
        pk_pattern = self.Meta.primary_key_pattern.format(pk=pk)
        return self.db().delete(self.key(pk_pattern))

    @classmethod
    def db(cls):
        """The Redis client configured for this model."""
        return cls.Meta.database

    @classmethod
    def filter(cls, *expressions: Sequence[Expression]):
        # TODO: Translate expressions into a Redis query and execute it.
        return cls

    @classmethod
    def exclude(cls, *expressions: Sequence[Expression]):
        # TODO: Translate expressions into a negated Redis query.
        return cls

    @classmethod
    def add(cls, models: Sequence['RedisModel']) -> Sequence['RedisModel']:
        """Save each model and return the saved models."""
        return [model.save() for model in models]

    @classmethod
    def update(cls, **field_values):
        # TODO: Bulk update of matching models.
        return cls

    @classmethod
    def values(cls):
        """Return raw values from Redis instead of model instances."""
        return cls

    def save(self) -> 'RedisModel':
        """Persist this model as a Redis hash, generating a pk if missing."""
        pk_field = self.Meta.primary_key.field
        document = jsonable_encoder(self.dict())
        pk = document[pk_field.name]

        if not pk:
            # Time-based (v1) UUIDs sort roughly by creation time.
            pk = str(uuid_from_time(datetime.datetime.now()))
            setattr(self, pk_field.name, pk)
            document[pk_field.name] = pk

        pk_pattern = self.Meta.primary_key_pattern.format(pk=pk)
        self.db().hset(self.key(pk_pattern), mapping=document)
        # Return the model, as annotated, rather than hset's integer reply.
        return self
|
71
redis_developer/orm/util.py
Normal file
71
redis_developer/orm/util.py
Normal file
|
@ -0,0 +1,71 @@
|
|||
# Adapted from the Cassandra Python driver.
|
||||
#
|
||||
# Copyright DataStax, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import calendar
|
||||
import random
|
||||
import uuid
|
||||
|
||||
|
||||
def uuid_from_time(time_arg, node=None, clock_seq=None):
    """Convert a datetime or a Unix timestamp to a version-1 :class:`uuid.UUID`.

    Adapted from the Cassandra Python driver.

    :param time_arg:
        The time to use for the timestamp portion of the UUID.
        Either a :class:`datetime` object or a timestamp in seconds
        (as returned from :meth:`time.time()`).
    :param node:
        Node integer for the UUID (up to 48 bits). Randomized if omitted.
    :param clock_seq:
        Clock sequence field for the UUID (up to 14 bits). Randomized if
        omitted.
    :raises ValueError: if *clock_seq* does not fit in 14 bits.
    :rtype: :class:`uuid.UUID`
    """
    if hasattr(time_arg, 'utctimetuple'):
        # datetime input: whole seconds from the UTC time tuple plus the
        # sub-second microseconds from the time portion.
        seconds = int(calendar.timegm(time_arg.utctimetuple()))
        microseconds = (seconds * 1e6) + time_arg.time().microsecond
    else:
        microseconds = int(time_arg * 1e6)

    # 0x01b21dd213814000 is the number of 100-ns intervals between the
    # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
    intervals = int(microseconds * 10) + 0x01b21dd213814000

    # Split the 60-bit timestamp across the three UUID time fields.
    time_low = intervals & 0xffffffff
    time_mid = (intervals >> 32) & 0xffff
    time_hi_version = (intervals >> 48) & 0x0fff

    if clock_seq is None:
        clock_seq = random.getrandbits(14)
    elif clock_seq > 0x3fff:
        raise ValueError('clock_seq is out of range (need a 14-bit value)')

    clock_seq_low = clock_seq & 0xff
    clock_seq_hi_variant = 0x80 | ((clock_seq >> 8) & 0x3f)

    if node is None:
        node = random.getrandbits(48)

    return uuid.UUID(
        fields=(time_low, time_mid, time_hi_version,
                clock_seq_hi_variant, clock_seq_low, node),
        version=1,
    )
|
Loading…
Add table
Add a link
Reference in a new issue