# coding: utf-8
"""
Amorphic Data Platform
Amorphic Data Platform - API Definition documentation
The version of the OpenAPI document: 1.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from openapi_client.models.data_pipeline_nodes_inner_compute_resource import DataPipelineNodesInnerComputeResource
from openapi_client.models.data_pipeline_nodes_inner_conditions_inner import DataPipelineNodesInnerConditionsInner
from openapi_client.models.data_pipeline_nodes_inner_inputs_inner import DataPipelineNodesInnerInputsInner
from openapi_client.models.data_pipeline_nodes_inner_outputs_inner import DataPipelineNodesInnerOutputsInner
from openapi_client.models.data_pipeline_nodes_inner_resource import DataPipelineNodesInnerResource
from typing import Optional, Set
from typing_extensions import Self
class DataPipelineNodesInner(BaseModel):
    """
    DataPipelineNodesInner

    Generated model for a single node in an Amorphic data pipeline.  Every
    field is optional; each snake_case attribute maps (via its alias) onto
    the PascalCase key used in the API payload.  Serialization helpers
    (``to_dict``/``from_dict``) recurse into the nested ``Resource``,
    ``ComputeResource``, ``Inputs``, ``Outputs`` and ``Conditions`` models.
    """ # noqa: E501
    module_type: Optional[StrictStr] = Field(default=None, alias="ModuleType")
    node_name: Optional[StrictStr] = Field(default=None, alias="NodeName")
    resource: Optional[DataPipelineNodesInnerResource] = Field(default=None, alias="Resource")
    compute_resource: Optional[DataPipelineNodesInnerComputeResource] = Field(default=None, alias="ComputeResource")
    arguments: Optional[Dict[str, Any]] = Field(default=None, alias="Arguments")
    source_language_id: Optional[StrictStr] = Field(default=None, alias="SourceLanguageId")
    target_language_id: Optional[StrictStr] = Field(default=None, alias="TargetLanguageId")
    source_dataset_id: Optional[StrictStr] = Field(default=None, alias="SourceDatasetId")
    target_dataset_id: Optional[StrictStr] = Field(default=None, alias="TargetDatasetId")
    file_processing_mode: Optional[StrictStr] = Field(default=None, alias="FileProcessingMode")
    features: Optional[List[StrictStr]] = Field(default=None, alias="Features")
    email_body_execution_property_key: Optional[StrictStr] = Field(default=None, alias="EmailBodyExecutionPropertyKey")
    email_subject_execution_property_key: Optional[StrictStr] = Field(default=None, alias="EmailSubjectExecutionPropertyKey")
    email_to_execution_property_key: Optional[StrictStr] = Field(default=None, alias="EmailToExecutionPropertyKey")
    timeout: Optional[StrictInt] = Field(default=None, alias="Timeout")
    concurrency_factor: Optional[StrictInt] = Field(default=None, alias="ConcurrencyFactor")
    dataset_domain: Optional[StrictStr] = Field(default=None, alias="DatasetDomain")
    sync_all_datasets: Optional[StrictBool] = Field(default=None, alias="SyncAllDatasets")
    listof_input_datasets: Optional[List[StrictStr]] = Field(default=None, alias="ListofInputDatasets")
    file_names_list: Optional[List[StrictStr]] = Field(default=None, alias="FileNamesList")
    from_time: Optional[StrictStr] = Field(default=None, alias="FromTime")
    to_time: Optional[StrictStr] = Field(default=None, alias="ToTime")
    configuration: Optional[Dict[str, Any]] = Field(default=None, alias="Configuration")
    resource_identifier: Optional[StrictStr] = Field(default=None, description="Resource identifier for nodes that require external resources (e.g., KnowledgeBase)", alias="ResourceIdentifier")
    inputs: Optional[List[DataPipelineNodesInnerInputsInner]] = Field(default=None, description="Input configurations for AI Flow nodes. Each input has Name and Type.", alias="Inputs")
    outputs: Optional[List[DataPipelineNodesInnerOutputsInner]] = Field(default=None, description="Output configurations for AI Flow nodes. Each output has Name and Type.", alias="Outputs")
    conditions: Optional[List[DataPipelineNodesInnerConditionsInner]] = Field(default=None, description="Condition configurations for Condition nodes. Each condition has Name and optional Expression (default condition doesn't require Expression).", alias="Conditions")
    # Ordered list of the API (alias) names of all declared properties.
    __properties: ClassVar[List[str]] = ["ModuleType", "NodeName", "Resource", "ComputeResource", "Arguments", "SourceLanguageId", "TargetLanguageId", "SourceDatasetId", "TargetDatasetId", "FileProcessingMode", "Features", "EmailBodyExecutionPropertyKey", "EmailSubjectExecutionPropertyKey", "EmailToExecutionPropertyKey", "Timeout", "ConcurrencyFactor", "DatasetDomain", "SyncAllDatasets", "ListofInputDatasets", "FileNamesList", "FromTime", "ToTime", "Configuration", "ResourceIdentifier", "Inputs", "Outputs", "Conditions"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of DataPipelineNodesInner from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of resource
        if self.resource:
            _dict['Resource'] = self.resource.to_dict()
        # override the default output from pydantic by calling `to_dict()` of compute_resource
        if self.compute_resource:
            _dict['ComputeResource'] = self.compute_resource.to_dict()
        # override the default output from pydantic by calling `to_dict()` of each item in inputs (list)
        _items = []
        if self.inputs:
            for _item_inputs in self.inputs:
                if _item_inputs:
                    _items.append(_item_inputs.to_dict())
            _dict['Inputs'] = _items
        # override the default output from pydantic by calling `to_dict()` of each item in outputs (list)
        _items = []
        if self.outputs:
            for _item_outputs in self.outputs:
                if _item_outputs:
                    _items.append(_item_outputs.to_dict())
            _dict['Outputs'] = _items
        # override the default output from pydantic by calling `to_dict()` of each item in conditions (list)
        _items = []
        if self.conditions:
            for _item_conditions in self.conditions:
                if _item_conditions:
                    _items.append(_item_conditions.to_dict())
            _dict['Conditions'] = _items
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of DataPipelineNodesInner from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "ModuleType": obj.get("ModuleType"),
            "NodeName": obj.get("NodeName"),
            "Resource": DataPipelineNodesInnerResource.from_dict(obj["Resource"]) if obj.get("Resource") is not None else None,
            "ComputeResource": DataPipelineNodesInnerComputeResource.from_dict(obj["ComputeResource"]) if obj.get("ComputeResource") is not None else None,
            "Arguments": obj.get("Arguments"),
            "SourceLanguageId": obj.get("SourceLanguageId"),
            "TargetLanguageId": obj.get("TargetLanguageId"),
            "SourceDatasetId": obj.get("SourceDatasetId"),
            "TargetDatasetId": obj.get("TargetDatasetId"),
            "FileProcessingMode": obj.get("FileProcessingMode"),
            "Features": obj.get("Features"),
            "EmailBodyExecutionPropertyKey": obj.get("EmailBodyExecutionPropertyKey"),
            "EmailSubjectExecutionPropertyKey": obj.get("EmailSubjectExecutionPropertyKey"),
            "EmailToExecutionPropertyKey": obj.get("EmailToExecutionPropertyKey"),
            "Timeout": obj.get("Timeout"),
            "ConcurrencyFactor": obj.get("ConcurrencyFactor"),
            "DatasetDomain": obj.get("DatasetDomain"),
            "SyncAllDatasets": obj.get("SyncAllDatasets"),
            "ListofInputDatasets": obj.get("ListofInputDatasets"),
            "FileNamesList": obj.get("FileNamesList"),
            "FromTime": obj.get("FromTime"),
            "ToTime": obj.get("ToTime"),
            "Configuration": obj.get("Configuration"),
            "ResourceIdentifier": obj.get("ResourceIdentifier"),
            "Inputs": [DataPipelineNodesInnerInputsInner.from_dict(_item) for _item in obj["Inputs"]] if obj.get("Inputs") is not None else None,
            "Outputs": [DataPipelineNodesInnerOutputsInner.from_dict(_item) for _item in obj["Outputs"]] if obj.get("Outputs") is not None else None,
            "Conditions": [DataPipelineNodesInnerConditionsInner.from_dict(_item) for _item in obj["Conditions"]] if obj.get("Conditions") is not None else None
        })
        return _obj