# Source code for openapi_client.models.datasource_flows_details

# coding: utf-8

"""
    Amorphic Data Platform

    Amorphic Data Platform - API Definition documentation

    The version of the OpenAPI document: 1.0
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
"""  # noqa: E501


from __future__ import annotations
import pprint
import re  # noqa: F401
import json

from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from openapi_client.models.datasource_flows_details_data_tranformation_confirguration import DatasourceFlowsDetailsDataTranformationConfirguration
from openapi_client.models.datasource_flows_details_dataflow_config import DatasourceFlowsDetailsDataflowConfig
from openapi_client.models.datasource_flows_details_dataset_details_inner import DatasourceFlowsDetailsDatasetDetailsInner
from openapi_client.models.datasource_flows_details_items_config_inner import DatasourceFlowsDetailsItemsConfigInner
from typing import Optional, Set
from typing_extensions import Self

class DatasourceFlowsDetails(BaseModel):
    """
    DatasourceFlowsDetails

    Generated model describing one datasource flow (a.k.a. dataflow) record.
    All fields are optional; JSON wire names are the PascalCase aliases.
    """  # noqa: E501
    dataflow_type: Optional[StrictStr] = Field(default=None, alias="DataflowType")
    dataflow_id: Optional[StrictStr] = Field(default=None, alias="DataflowId")
    ingestion_type: Optional[StrictStr] = Field(default=None, alias="IngestionType")
    dataflow_name: Optional[StrictStr] = Field(default=None, alias="DataflowName")
    datasource_id: Optional[StrictStr] = Field(default=None, alias="DatasourceId")
    dataflow_status: Optional[StrictStr] = Field(default=None, alias="DataflowStatus")
    process_type: Optional[StrictStr] = Field(default=None, alias="ProcessType")
    cdc_tracking_method: Optional[StrictStr] = Field(default=None, alias="CDCTrackingMethod")
    target_location: Optional[StrictStr] = Field(default=None, alias="TargetLocation")
    create_dataset: Optional[StrictBool] = Field(default=None, alias="CreateDataset")
    dataset_details: Optional[List[DatasourceFlowsDetailsDatasetDetailsInner]] = Field(default=None, alias="DatasetDetails")
    message: Optional[StrictStr] = Field(default=None, alias="Message")
    data_format: Optional[StrictStr] = Field(default=None, alias="DataFormat")
    creation_time: Optional[StrictStr] = Field(default=None, alias="CreationTime")
    created_by: Optional[StrictStr] = Field(default=None, alias="CreatedBy")
    last_modified_by: Optional[StrictStr] = Field(default=None, alias="LastModifiedBy")
    last_modified_time: Optional[StrictStr] = Field(default=None, alias="LastModifiedTime")
    is_data_transformation_enabled: Optional[StrictBool] = Field(default=None, alias="IsDataTransformationEnabled")
    items_config: Optional[List[DatasourceFlowsDetailsItemsConfigInner]] = Field(default=None, description="Configuration for ArcGIS items to be processed", alias="ItemsConfig")
    schedules: Optional[List[Any]] = Field(default=None, alias="Schedules")
    registered_datasets: Optional[List[Any]] = Field(default=None, alias="RegisteredDatasets")
    datasource_name: Optional[StrictStr] = Field(default=None, alias="DatasourceName")
    file_type: Optional[StrictStr] = Field(default=None, alias="FileType")
    resource_origin: Optional[StrictStr] = Field(default=None, alias="ResourceOrigin")
    dataflow_config: Optional[DatasourceFlowsDetailsDataflowConfig] = Field(default=None, alias="DataflowConfig")
    # NOTE: "Tranformation"/"Confirguration" misspellings come from the upstream
    # OpenAPI spec (and the sibling module name) — do not "fix" them, the wire
    # format and imports depend on these exact strings.
    data_tranformation_confirguration: Optional[DatasourceFlowsDetailsDataTranformationConfirguration] = Field(default=None, alias="DataTranformationConfirguration")
    __properties: ClassVar[List[str]] = ["DataflowType", "DataflowId", "IngestionType", "DataflowName", "DatasourceId", "DataflowStatus", "ProcessType", "CDCTrackingMethod", "TargetLocation", "CreateDataset", "DatasetDetails", "Message", "DataFormat", "CreationTime", "CreatedBy", "LastModifiedBy", "LastModifiedTime", "IsDataTransformationEnabled", "ItemsConfig", "Schedules", "RegisteredDatasets", "DatasourceName", "FileType", "ResourceOrigin", "DataflowConfig", "DataTranformationConfirguration"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of DatasourceFlowsDetails from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of each item in dataset_details (list)
        _items = []
        if self.dataset_details:
            for _item_dataset_details in self.dataset_details:
                if _item_dataset_details:
                    _items.append(_item_dataset_details.to_dict())
            _dict['DatasetDetails'] = _items
        # override the default output from pydantic by calling `to_dict()` of each item in items_config (list)
        _items = []
        if self.items_config:
            for _item_items_config in self.items_config:
                if _item_items_config:
                    _items.append(_item_items_config.to_dict())
            _dict['ItemsConfig'] = _items
        # override the default output from pydantic by calling `to_dict()` of dataflow_config
        if self.dataflow_config:
            _dict['DataflowConfig'] = self.dataflow_config.to_dict()
        # override the default output from pydantic by calling `to_dict()` of data_tranformation_confirguration
        if self.data_tranformation_confirguration:
            _dict['DataTranformationConfirguration'] = self.data_tranformation_confirguration.to_dict()
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of DatasourceFlowsDetails from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "DataflowType": obj.get("DataflowType"),
            "DataflowId": obj.get("DataflowId"),
            "IngestionType": obj.get("IngestionType"),
            "DataflowName": obj.get("DataflowName"),
            "DatasourceId": obj.get("DatasourceId"),
            "DataflowStatus": obj.get("DataflowStatus"),
            "ProcessType": obj.get("ProcessType"),
            "CDCTrackingMethod": obj.get("CDCTrackingMethod"),
            "TargetLocation": obj.get("TargetLocation"),
            "CreateDataset": obj.get("CreateDataset"),
            "DatasetDetails": [DatasourceFlowsDetailsDatasetDetailsInner.from_dict(_item) for _item in obj["DatasetDetails"]] if obj.get("DatasetDetails") is not None else None,
            "Message": obj.get("Message"),
            "DataFormat": obj.get("DataFormat"),
            "CreationTime": obj.get("CreationTime"),
            "CreatedBy": obj.get("CreatedBy"),
            "LastModifiedBy": obj.get("LastModifiedBy"),
            "LastModifiedTime": obj.get("LastModifiedTime"),
            "IsDataTransformationEnabled": obj.get("IsDataTransformationEnabled"),
            "ItemsConfig": [DatasourceFlowsDetailsItemsConfigInner.from_dict(_item) for _item in obj["ItemsConfig"]] if obj.get("ItemsConfig") is not None else None,
            "Schedules": obj.get("Schedules"),
            "RegisteredDatasets": obj.get("RegisteredDatasets"),
            "DatasourceName": obj.get("DatasourceName"),
            "FileType": obj.get("FileType"),
            "ResourceOrigin": obj.get("ResourceOrigin"),
            "DataflowConfig": DatasourceFlowsDetailsDataflowConfig.from_dict(obj["DataflowConfig"]) if obj.get("DataflowConfig") is not None else None,
            "DataTranformationConfirguration": DatasourceFlowsDetailsDataTranformationConfirguration.from_dict(obj["DataTranformationConfirguration"]) if obj.get("DataTranformationConfirguration") is not None else None
        })
        return _obj