Module omnipy.modules.raw.serializers
Overview
View Source
from typing import Any, IO, Type
from omnipy.data.dataset import Dataset
from omnipy.data.model import Model
from omnipy.data.serializer import TarFileSerializer
# from typing_inspect import get_generic_bases, get_generic_type, get_origin, get_parameters
class RawDatasetToTarFileSerializer(TarFileSerializer):
    """Serializer for datasets of plain string models, backed by gzipped tar archives.

    Each item of a `Dataset[Model[str]]` becomes one `<name>.raw` entry inside
    the archive, with the string contents encoded as UTF-8 bytes.
    """
    @classmethod
    def is_dataset_directly_supported(cls, dataset: Dataset) -> bool:
        """Return True only for datasets of the exact type `Dataset[Model[str]]`."""
        # Deliberately an exact type match (not isinstance): subclasses or
        # differently parametrized datasets are not directly supported.
        return type(dataset) is Dataset[Model[str]]

    @classmethod
    def get_dataset_cls_for_new(cls) -> Type[Dataset]:
        """Return the dataset class to instantiate when deserializing."""
        return Dataset[Model[str]]

    @classmethod
    def get_output_file_suffix(cls) -> str:
        """Return the file suffix used for entries inside the tar archive."""
        return 'raw'

    @classmethod
    def serialize(cls, dataset: Dataset[Model[str]]) -> bytes:
        """Encode every item as UTF-8 and pack the dataset into a gzipped tarfile."""
        def raw_encode_func(contents: str) -> bytes:
            return contents.encode('utf8')

        return cls.create_tarfile_from_dataset(dataset, data_encode_func=raw_encode_func)

    @classmethod
    def deserialize(cls, tarfile_bytes: bytes) -> Dataset[Model[str]]:
        """Unpack gzipped tarfile bytes into a new `Dataset[Model[str]]`."""
        dataset = Dataset[Model[str]]()

        def raw_decode_func(file_stream: IO[bytes]) -> str:
            return file_stream.read().decode('utf8')

        def python_dictify_object(obj_type: str, obj_val: Any) -> dict:
            return {obj_type: obj_val}

        cls.create_dataset_from_tarfile(
            dataset,
            tarfile_bytes,
            data_decode_func=raw_decode_func,
            dictify_object_func=python_dictify_object,
            import_method='from_data')  # noqa

        return dataset
Classes
RawDatasetToTarFileSerializer
View Source
class RawDatasetToTarFileSerializer(TarFileSerializer):
    """Tar-file serializer for datasets holding plain string models."""

    @classmethod
    def is_dataset_directly_supported(cls, dataset: Dataset) -> bool:
        """Accept only an exact `Dataset[Model[str]]` parametrized type."""
        return Dataset[Model[str]] is type(dataset)

    @classmethod
    def get_dataset_cls_for_new(cls) -> Type[Dataset]:
        """Dataset class used when a fresh dataset must be created."""
        return Dataset[Model[str]]

    @classmethod
    def get_output_file_suffix(cls) -> str:
        """Suffix appended to each entry name in the archive."""
        return 'raw'

    @classmethod
    def serialize(cls, dataset: Dataset[Model[str]]) -> bytes:
        """Produce gzipped tarfile bytes from a string-model dataset."""
        def encode_as_utf8(contents: str) -> bytes:
            return contents.encode('utf8')

        return cls.create_tarfile_from_dataset(dataset, data_encode_func=encode_as_utf8)

    @classmethod
    def deserialize(cls, tarfile_bytes: bytes) -> Dataset[Model[str]]:
        """Rebuild a string-model dataset from gzipped tarfile bytes."""
        def decode_utf8_stream(file_stream: IO[bytes]) -> str:
            return file_stream.read().decode('utf8')

        def wrap_in_dict(obj_type: str, obj_val: Any) -> dict:
            return {obj_type: obj_val}

        new_dataset = Dataset[Model[str]]()
        cls.create_dataset_from_tarfile(
            new_dataset,
            tarfile_bytes,
            data_decode_func=decode_utf8_stream,
            dictify_object_func=wrap_in_dict,
            import_method='from_data')  # noqa

        return new_dataset
Static methods
create_dataset_from_tarfile
def create_dataset_from_tarfile(
dataset: omnipy.data.dataset.Dataset,
tarfile_bytes: bytes,
data_decode_func: Callable[[IO[bytes]], Any],
dictify_object_func: Callable[[str, Any], dict | str],
import_method='from_data'
)
Parameters:
| Name | Type | Description | Default |
|------|------|-------------|---------|
| dataset | Dataset | | |
| tarfile_bytes | bytes | | |
| data_decode_func | Callable[[IO[bytes]], Any] | | |
| dictify_object_func | Callable[[str, Any], dict \| str] | | |
| import_method | str | | 'from_data' |
View Source
View Source
@classmethod
def create_dataset_from_tarfile(cls,
                                dataset: 'Dataset',
                                tarfile_bytes: bytes,
                                data_decode_func: Callable[[IO[bytes]], Any],
                                dictify_object_func: Callable[[str, Any], 'dict | str'],
                                import_method='from_data'):
    """Populate `dataset` from an in-memory gzipped tar archive.

    Parameters:
        dataset: target dataset; each archive entry is imported into it.
        tarfile_bytes: the gzipped tar archive as a byte string.
        data_decode_func: converts an entry's byte stream to a data object.
        dictify_object_func: wraps (entry name, decoded value) for import.
        import_method: name of the dataset method used to ingest each entry.
    """
    with tarfile.open(fileobj=BytesIO(tarfile_bytes), mode='r:gz') as tarfile_stream:
        for filename in tarfile_stream.getnames():
            # Validate the suffix before doing any extraction work.
            assert filename.endswith(f'.{cls.get_output_file_suffix()}')
            obj_type_file = tarfile_stream.extractfile(filename)
            # extractfile() returns None for non-regular members
            # (directories, links) — guard against silent breakage.
            assert obj_type_file is not None
            obj_type = '.'.join(filename.split('.')[:-1])
            getattr(dataset, import_method)(
                dictify_object_func(obj_type, data_decode_func(obj_type_file)))
create_tarfile_from_dataset
def create_tarfile_from_dataset(
dataset: omnipy.data.dataset.Dataset,
data_encode_func: Callable[[Any], bytes | memoryview]
)
Parameters:
| Name | Type | Description | Default |
|------|------|-------------|---------|
| dataset | Dataset | | |
| data_encode_func | Callable[[Any], bytes \| memoryview] | | |
View Source
@classmethod
def create_tarfile_from_dataset(cls,
                                dataset: 'Dataset',
                                data_encode_func: Callable[[Any], 'bytes | memoryview']):
    """Pack every (name, object) item of `dataset` into a gzipped tar archive.

    Each item becomes an entry named '<name>.<suffix>', where the suffix
    comes from `cls.get_output_file_suffix()`. Returns the archive bytes.

    Parameters:
        dataset: source dataset; iterated via `.items()`.
        data_encode_func: converts each data object to raw bytes.
    """
    bytes_io = BytesIO()
    with tarfile.open(fileobj=bytes_io, mode='w:gz') as tarfile_stream:
        for obj_type, data_obj in dataset.items():
            # Renamed from 'json_data_bytestream': contents are whatever
            # data_encode_func produces, not necessarily JSON. A fresh
            # BytesIO is already positioned at 0, so no seek is needed.
            encoded_stream = BytesIO(data_encode_func(data_obj))
            tarinfo = TarInfo(name=f'{obj_type}.{cls.get_output_file_suffix()}')
            tarinfo.size = len(encoded_stream.getbuffer())
            tarfile_stream.addfile(tarinfo, encoded_stream)
    return bytes_io.getbuffer().tobytes()
deserialize
def deserialize(
tarfile_bytes: bytes
) -> omnipy.data.dataset.Dataset[Annotated[Optional[omnipy.data.model.Model[Annotated[Optional[str], 'Fake Optional from Model']]], 'Fake Optional from Dataset']]
Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| tarfile_bytes | bytes | | |

Returns:

| Type | Description |
|------|-------------|
| Dataset[Annotated[Optional[Model[Annotated[Optional[str], 'Fake Optional from Model']]], 'Fake Optional from Dataset']] | |
View Source
@classmethod
def deserialize(cls, tarfile_bytes: bytes) -> Dataset[Model[str]]:
    """Rebuild a string-model dataset from gzipped tarfile bytes."""
    def _decode(file_stream: IO[bytes]) -> str:
        return file_stream.read().decode('utf8')

    def _dictify(obj_type: str, obj_val: Any) -> dict:
        return {obj_type: obj_val}

    result = Dataset[Model[str]]()
    cls.create_dataset_from_tarfile(
        result,
        tarfile_bytes,
        data_decode_func=_decode,
        dictify_object_func=_dictify,
        import_method='from_data')  # noqa
    return result
get_dataset_cls_for_new
Returns:

| Type | Description |
|------|-------------|
| Type[Dataset] | |
View Source
@classmethod
def get_dataset_cls_for_new(cls) -> Type[Dataset]:
    """Return the dataset class used when creating a fresh dataset."""
    return Dataset[Model[str]]
get_output_file_suffix
Returns:

| Type | Description |
|------|-------------|
| str | |
View Source
@classmethod
def get_output_file_suffix(cls) -> str:
    """File suffix used for entries written by this serializer."""
    return 'raw'
is_dataset_directly_supported
Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| dataset | Dataset | | |

Returns:

| Type | Description |
|------|-------------|
| bool | |
View Source
@classmethod
def is_dataset_directly_supported(cls, dataset: Dataset) -> bool:
    """Check for an exact `Dataset[Model[str]]` parametrized-type match."""
    return Dataset[Model[str]] is type(dataset)
serialize
def serialize(
dataset: omnipy.data.dataset.Dataset[Annotated[Optional[omnipy.data.model.Model[Annotated[Optional[str], 'Fake Optional from Model']]], 'Fake Optional from Dataset']]
) -> bytes
Parameters:

| Name | Type | Description | Default |
|------|------|-------------|---------|
| dataset | Dataset[Annotated[Optional[Model[Annotated[Optional[str], 'Fake Optional from Model']]], 'Fake Optional from Dataset']] | | |

Returns:

| Type | Description |
|------|-------------|
| bytes | |
View Source
@classmethod
def serialize(cls, dataset: Dataset[Model[str]]) -> bytes:
    """UTF-8 encode each item and pack the dataset into a gzipped tarfile."""
    def _encode(contents: str) -> bytes:
        return contents.encode('utf8')

    return cls.create_tarfile_from_dataset(dataset, data_encode_func=_encode)