Module arti.views.python

View Source
from __future__ import annotations

from datetime import date, datetime

from arti.types.python import python_type_system
from arti.views import View


class PythonBuiltin(View):
    _abstract_ = True

    type_system = python_type_system


class Date(PythonBuiltin):
    python_type = date


class Datetime(PythonBuiltin):
    python_type = datetime


class Dict(PythonBuiltin):
    python_type = dict


class Float(PythonBuiltin):
    python_type = float


class Int(PythonBuiltin):
    python_type = int


class List(PythonBuiltin):
    python_type = list


class Null(PythonBuiltin):
    python_type = None


class Str(PythonBuiltin):
    python_type = str
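
Each concrete view here simply pairs a Python builtin with the shared python_type_system, which lets arti resolve a View class from an ordinary type annotation. A minimal usage sketch, assuming only the registration behavior visible in get_class_for below (any additional fields a constructed View may require are not shown):

    from typing import Annotated

    from arti.views import View
    from arti.views.python import Int, List

    # Plain builtins resolve to their registered view classes.
    assert View.get_class_for(int) is Int
    # Subscripted generics fall back to their origin type (list[int] -> list).
    assert View.get_class_for(list[int]) is List
    # Annotated metadata can force a specific view explicitly.
    assert View.get_class_for(Annotated[int, Int]) is Int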

Classes

Date

class Date(
    __pydantic_self__,
    **data: Any
)
View Source
class Date(PythonBuiltin):

    python_type = date

Ancestors (in MRO)

  • arti.views.python.PythonBuiltin
  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

Config
priority
python_type
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.
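
For example (standard pydantic v1 behavior; the field value shown is hypothetical):

    # Rebuild a view from data that was already validated elsewhere,
    # skipping pydantic validation entirely.
    view = Date.construct(mode="READ")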

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view
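
from_annotation ties the pieces together: resolve the view class for the annotation, instantiate it with the requested mode (plus any keyword arguments extracted from Annotated metadata), then verify the annotation can actually represent the view's type. A sketch, assuming "READ" is a valid MODE value (it is referenced by check_artifact_compatibility below) and that no further fields are required:

    from datetime import date

    from arti.views import View

    # Resolves to the Date view and checks that `date` can represent its type.
    view = View.from_annotation(date, mode="READ")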

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class
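
The resolution order is therefore: explicit Annotated metadata first, then the registry keyed by python_type, then the unsubscripted origin of a generic; anything else raises. A sketch of the failure path, assuming no installed view registers complex:

    from arti.views import View

    try:
        View.get_class_for(complex)
    except ValueError as exc:
        print(exc)  # suggests forcing a view via Annotated[..., <View class>]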

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choose which fields to include, exclude and change.

Parameters:

Name | Type | Description | Default
include | None | fields to include in new model | None
exclude | None | fields to exclude from new model, as with values this takes precedence over include | None
update | None | values to change/add in the new model. Note: the data is not validated before creating the new model: you should trust this data | None
deep | None | set to True to make a deep copy of the model | None

Returns:

Type | Description
None | new model instance
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy
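
Unlike stock pydantic, this copy re-validates the duplicated model by default, then restores __fields_set__ and any private attributes. A hypothetical sketch (the field name and values are illustrative):

    # `view` is any constructed View instance.
    relaxed = view.copy(update={"mode": "READWRITE"})  # re-validated copy
    raw = view.copy(validate=False)                    # plain pydantic copy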

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model, include and exclude arguments as per dict().

encoder is an optional function to supply as default to json.dumps(), other arguments as per json.dumps().
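
Both are the stock pydantic v1 serializers; for instance (values are illustrative):

    data = view.dict(exclude_defaults=True)
    text = view.json(indent=2)  # extra kwargs pass through to json.dumps()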

Datetime

class Datetime(
    __pydantic_self__,
    **data: Any
)
View Source
class Datetime(PythonBuiltin):

    python_type = datetime

Ancestors (in MRO)

  • arti.views.python.PythonBuiltin
  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

Config
priority
python_type
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choose which fields to include, exclude and change.

Parameters:

Name | Type | Description | Default
include | None | fields to include in new model | None
exclude | None | fields to exclude from new model, as with values this takes precedence over include | None
update | None | values to change/add in the new model. Note: the data is not validated before creating the new model: you should trust this data | None
deep | None | set to True to make a deep copy of the model | None

Returns:

Type | Description
None | new model instance
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model, include and exclude arguments as per dict().

encoder is an optional function to supply as default to json.dumps(), other arguments as per json.dumps().

Dict

class Dict(
    __pydantic_self__,
    **data: Any
)
View Source
class Dict(PythonBuiltin):

    python_type = dict

Ancestors (in MRO)

  • arti.views.python.PythonBuiltin
  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

Config
priority
python_type
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choose which fields to include, exclude and change.

Parameters:

Name | Type | Description | Default
include | None | fields to include in new model | None
exclude | None | fields to exclude from new model, as with values this takes precedence over include | None
update | None | values to change/add in the new model. Note: the data is not validated before creating the new model: you should trust this data | None
deep | None | set to True to make a deep copy of the model | None

Returns:

Type | Description
None | new model instance
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model, include and exclude arguments as per dict().

encoder is an optional function to supply as default to json.dumps(), other arguments as per json.dumps().

Float

class Float(
    __pydantic_self__,
    **data: Any
)
View Source
class Float(PythonBuiltin):

    python_type = float

Ancestors (in MRO)

  • arti.views.python.PythonBuiltin
  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

Config
priority
python_type
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choose which fields to include, exclude and change.

Parameters:

Name | Type | Description | Default
include | None | fields to include in new model | None
exclude | None | fields to exclude from new model, as with values this takes precedence over include | None
update | None | values to change/add in the new model. Note: the data is not validated before creating the new model: you should trust this data | None
deep | None | set to True to make a deep copy of the model | None

Returns:

Type | Description
None | new model instance
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model, include and exclude arguments as per dict().

encoder is an optional function to supply as default to json.dumps(), other arguments as per json.dumps().

Int

class Int(
    __pydantic_self__,
    **data: Any
)
View Source
class Int(PythonBuiltin):

    python_type = int

Ancestors (in MRO)

  • arti.views.python.PythonBuiltin
  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

Config
priority
python_type
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choose which fields to include, exclude and change.

Parameters:

Name | Type | Description | Default
include | None | fields to include in new model | None
exclude | None | fields to exclude from new model, as with values this takes precedence over include | None
update | None | values to change/add in the new model. Note: the data is not validated before creating the new model: you should trust this data | None
deep | None | set to True to make a deep copy of the model | None

Returns:

Type | Description
None | new model instance
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model, include and exclude arguments as per dict().

encoder is an optional function to supply as default to json.dumps(), other arguments as per json.dumps().

List

class List(
    __pydantic_self__,
    **data: Any
)
View Source
class List(PythonBuiltin):

    python_type = list

Ancestors (in MRO)

  • arti.views.python.PythonBuiltin
  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

Config
priority
python_type
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choose which fields to include, exclude and change.

Parameters:

Name | Type | Description | Default
include | None | fields to include in new model | None
exclude | None | fields to exclude from new model, as with values this takes precedence over include | None
update | None | values to change/add in the new model. Note: the data is not validated before creating the new model: you should trust this data | None
deep | None | set to True to make a deep copy of the model | None

Returns:

Type | Description
None | new model instance
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model, include and exclude arguments as per dict().

encoder is an optional function to supply as default to json.dumps(), other arguments as per json.dumps().

Null

class Null(
    __pydantic_self__,
    **data: Any
)
View Source
class Null(PythonBuiltin):

    python_type = None

Ancestors (in MRO)

  • arti.views.python.PythonBuiltin
  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

Config
priority
python_type
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choosing which fields to include, exclude, and change.

Parameters:

  • include: fields to include in the new model
  • exclude: fields to exclude from the new model; as with values, this takes precedence over include
  • update: values to change/add in the new model. Note: the data is not validated before creating the new model; you should trust this data
  • deep: set to True to make a deep copy of the model

Returns:

  A new model instance.
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy
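
Unlike pydantic's stock BaseModel.copy, this override re-validates the copy by default. A small sketch (constructing the view via from_annotation, with an assumed "READ" mode):

    from arti.views import View

    view = View.from_annotation(int, mode="READ")

    # copy() validates the duplicated instance by default...
    revalidated = view.copy()

    # ...pass validate=False to get pydantic's raw (unvalidated) copy behavior.
    raw = view.copy(validate=False)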

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model; the include and exclude arguments behave as in dict().

encoder is an optional function supplied as the default to json.dumps(); other arguments are passed through to json.dumps().
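
Both methods are inherited from pydantic. A brief sketch (again assuming a view built via from_annotation with an assumed "READ" mode):

    from arti.views import View

    view = View.from_annotation(int, mode="READ")

    # Serialize the view's fields; exclude_defaults trims the output to explicitly set values.
    as_dict = view.dict(exclude_defaults=True)
    as_json = view.json(exclude_defaults=True)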

PythonBuiltin

class PythonBuiltin(
    __pydantic_self__,
    **data: Any
)
View Source
class PythonBuiltin(View):

    _abstract_ = True

    type_system = python_type_system

Ancestors (in MRO)

  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Descendants

  • arti.views.python.Date
  • arti.views.python.Datetime
  • arti.views.python.Dict
  • arti.views.python.Float
  • arti.views.python.Int
  • arti.views.python.List
  • arti.views.python.Null
  • arti.views.python.Str
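
Concrete subclasses only declare a python_type; PythonBuiltin supplies the shared type_system. A hypothetical sketch of adding another builtin view (the Decimal name is illustrative, and it assumes python_type_system knows how to map the type):

    import decimal

    from arti.views.python import PythonBuiltin

    class Decimal(PythonBuiltin):
        # Hypothetical: declaring python_type is what keys the view for
        # annotation lookup via View.get_class_for / View.from_annotation.
        python_type = decimal.Decimal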

Class variables

Config
priority
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.
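
A short sketch (the "READ" mode value is an assumption; only use construct with data you trust, since nothing is validated):

    from arti.views.python import Int

    # Bypasses validation entirely; unspecified fields keep their defaults.
    view = Int.construct(mode="READ")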

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view
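
A minimal sketch of the flow above, assuming the python type system round-trips datetime and that "WRITE" is a valid mode:

    from datetime import datetime

    from arti.views import View
    from arti.views.python import Datetime

    # Resolves the view class, instantiates it with the given mode, and runs the
    # annotation compatibility check before returning.
    view = View.from_annotation(datetime, mode="WRITE")
    assert isinstance(view, Datetime)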

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choosing which fields to include, exclude, and change.

Parameters:

  • include: fields to include in the new model
  • exclude: fields to exclude from the new model; as with values, this takes precedence over include
  • update: values to change/add in the new model. Note: the data is not validated before creating the new model; you should trust this data
  • deep: set to True to make a deep copy of the model

Returns:

  A new model instance.
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model; the include and exclude arguments behave as in dict().

encoder is an optional function supplied as the default to json.dumps(); other arguments are passed through to json.dumps().

Str

class Str(
    __pydantic_self__,
    **data: Any
)
View Source
class Str(PythonBuiltin):

    python_type = str

Ancestors (in MRO)

  • arti.views.python.PythonBuiltin
  • arti.views.View
  • arti.internal.models.Model
  • pydantic.main.BaseModel
  • pydantic.utils.Representation

Class variables

Config
priority
python_type
type_system

Static methods

construct

def construct(
    _fields_set: Optional[ForwardRef('SetStr')] = None,
    **values: Any
) -> 'Model'

Creates a new model, setting __dict__ and __fields_set__ from trusted or pre-validated data.

Default values are respected, but no other validation is performed. Behaves as if Config.extra = 'allow' was set, since it adds all passed values.

from_annotation

def from_annotation(
    annotation: 'Any',
    *,
    mode: 'MODE'
) -> 'View'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def from_annotation(cls, annotation: Any, *, mode: MODE) -> View:

        view_class = cls.get_class_for(annotation)

        view = view_class(mode=mode, **cls._get_kwargs_from_annotation(annotation))

        view.check_annotation_compatibility(annotation)

        return view

from_orm

def from_orm(
    obj: Any
) -> 'Model'

get_class_for

def get_class_for(
    annotation: 'Any'
) -> 'builtins.type[View]'
View Source
    @classmethod  # TODO: Use typing.Self for return, pending mypy support

    def get_class_for(cls, annotation: Any) -> builtins.type[View]:

        view_class = get_item_from_annotated(annotation, cls, is_subclass=True)

        if view_class is None:

            # We've already searched for a View instance in the original Annotated args, so just

            # extract the root annotation.

            annotation = discard_Annotated(annotation)

            # Import the View submodules to trigger registration.

            import_submodules(__path__, __name__)

            view_class = cls._by_python_type_.get(annotation)

            # If no match and the type is a subscripted Generic (eg: `list[int]`), try to unwrap any

            # extra type variables.

            if view_class is None and (origin := get_origin(annotation)) is not None:

                view_class = cls._by_python_type_.get(origin)

            if view_class is None:

                raise ValueError(

                    f"{annotation} cannot be matched to a View, try setting one explicitly (eg: `Annotated[int, arti.views.python.Int]`)"

                )

        return view_class

parse_file

def parse_file(
    path: Union[str, pathlib.Path],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

parse_obj

def parse_obj(
    obj: Any
) -> 'Model'

parse_raw

def parse_raw(
    b: Union[str, bytes],
    *,
    content_type: 'unicode' = None,
    encoding: 'unicode' = 'utf8',
    proto: pydantic.parse.Protocol = None,
    allow_pickle: bool = False
) -> 'Model'

schema

def schema(
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}'
) -> 'DictStrAny'

schema_json

def schema_json(
    *,
    by_alias: bool = True,
    ref_template: 'unicode' = '#/definitions/{model}',
    **dumps_kwargs: Any
) -> 'unicode'
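
Both come straight from pydantic. A hedged sketch (assuming all of the view's fields are schema-able types):

    from arti.views.python import Str

    # JSON Schema for the model; schema() returns the same structure as a dict.
    print(Str.schema_json(indent=2))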

update_forward_refs

def update_forward_refs(
    **localns: Any
) -> None

Try to update ForwardRefs on fields based on this Model, globalns and localns.

validate

def validate(
    value: Any
) -> 'Model'
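
validate() is what the copy() method below uses to re-check field values. A small sketch (view construction and the "READ" mode are assumptions):

    from arti.views import View
    from arti.views.python import Str

    view = View.from_annotation(str, mode="READ")

    # Accepts an existing instance or a mapping of field values.
    revalidated = Str.validate(view)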

Instance variables

fingerprint

Methods

check_annotation_compatibility

def check_annotation_compatibility(
    self,
    annotation: 'Any'
) -> 'None'
View Source
    def check_annotation_compatibility(self, annotation: Any) -> None:

        # We're only checking the root annotation (lenient_issubclass ignores Annotated anyway), so

        # tidy up the value to improve error messages.

        annotation = discard_Annotated(annotation)

        system_type = self.type_system.to_system(self.type, hints={})

        if not (

            lenient_issubclass(system_type, annotation)

            or lenient_issubclass(type(system_type), annotation)

        ):

            raise ValueError(f"{annotation} cannot be used to represent {self.type}")

check_artifact_compatibility

def check_artifact_compatibility(
    self,
    artifact: 'Artifact'
) -> 'None'
View Source
    def check_artifact_compatibility(self, artifact: Artifact) -> None:

        if not isinstance(artifact, self.artifact_class):

            raise ValueError(f"expected an instance of {self.artifact_class}, got {type(artifact)}")

        self._check_type_compatibility(view_type=self.type, artifact_type=artifact.type)

        if self.mode in {"READ", "READWRITE"}:

            io._read.lookup(

                type(artifact.type),

                type(artifact.format),

                list[artifact.storage.storage_partition_type],  # type: ignore[name-defined]

                type(self),

            )

        if self.mode in {"WRITE", "READWRITE"}:

            io._write.lookup(

                self.python_type,

                type(artifact.type),

                type(artifact.format),

                artifact.storage.storage_partition_type,

                type(self),

            )

copy

def copy(
    self,
    *,
    deep: 'bool' = False,
    validate: 'bool' = True,
    **kwargs: 'Any'
) -> 'Self'

Duplicate a model, optionally choosing which fields to include, exclude, and change.

Parameters:

  • include: fields to include in the new model
  • exclude: fields to exclude from the new model; as with values, this takes precedence over include
  • update: values to change/add in the new model. Note: the data is not validated before creating the new model; you should trust this data
  • deep: set to True to make a deep copy of the model

Returns:

  A new model instance.
View Source
    def copy(self, *, deep: bool = False, validate: bool = True, **kwargs: Any) -> Self:

        copy = super().copy(deep=deep, **kwargs)

        if validate:

            # NOTE: We set exclude_unset=False so that all existing defaulted fields are reused (as

            # is normal `.copy` behavior).

            #

            # To reduce `repr` noise, we'll reset .__fields_set__ to those of the pre-validation copy

            # (which includes those originally set + updated).

            fields_set = copy.__fields_set__

            copy = copy.validate(

                dict(copy._iter(to_dict=False, by_alias=False, exclude_unset=False))

            )

            # Use object.__setattr__ to bypass frozen model assignment errors

            object.__setattr__(copy, "__fields_set__", set(fields_set))

            # Copy over the private attributes, which are missing after validation (since we're only

            # passing the fields).

            for name in self.__private_attributes__:

                if (value := getattr(self, name, Undefined)) is not Undefined:

                    if deep:

                        value = deepcopy(value)

                    object.__setattr__(copy, name, value)

        return copy

dict

def dict(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False
) -> 'DictStrAny'

Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.

json

def json(
    self,
    *,
    include: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    exclude: Union[ForwardRef('AbstractSetIntStr'), ForwardRef('MappingIntStrAny'), NoneType] = None,
    by_alias: bool = False,
    skip_defaults: Optional[bool] = None,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    encoder: Optional[Callable[[Any], Any]] = None,
    models_as_dict: bool = True,
    **dumps_kwargs: Any
) -> 'unicode'

Generate a JSON representation of the model; the include and exclude arguments behave as in dict().

encoder is an optional function supplied as the default to json.dumps(); other arguments are passed through to json.dumps().