Source code for openapi_client.models.data_pipelines_post_nodes_inner

# coding: utf-8

"""
    Amorphic Data Platform

    Amorphic Data Platform - API Definition documentation

    The version of the OpenAPI document: 1.0
    Generated by OpenAPI Generator (https://openapi-generator.tech)

    Do not edit the class manually.
"""  # noqa: E501


from __future__ import annotations
import pprint
import re  # noqa: F401
import json

from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictFloat, StrictInt, StrictStr
from typing import Any, ClassVar, Dict, List, Optional, Union
from openapi_client.models.data_pipeline_nodes_inner_conditions_inner import DataPipelineNodesInnerConditionsInner
from openapi_client.models.data_pipeline_nodes_inner_inputs_inner import DataPipelineNodesInnerInputsInner
from openapi_client.models.data_pipeline_nodes_inner_outputs_inner import DataPipelineNodesInnerOutputsInner
from openapi_client.models.data_pipeline_nodes_inner_resource import DataPipelineNodesInnerResource
from typing import Optional, Set
from typing_extensions import Self

class DataPipelinesPostNodesInner(BaseModel):
    """
    DataPipelinesPostNodesInner
    """ # noqa: E501
    module_type: StrictStr = Field(alias="ModuleType")
    source_dataset_id: Optional[StrictStr] = Field(default=None, alias="SourceDatasetId")
    source_language_id: Optional[StrictStr] = Field(default=None, alias="SourceLanguageId")
    target_language_id: Optional[StrictStr] = Field(default=None, alias="TargetLanguageId")
    target_dataset_id: Optional[StrictStr] = Field(default=None, alias="TargetDatasetId")
    file_processing_mode: Optional[StrictStr] = Field(default=None, alias="FileProcessingMode")
    node_name: StrictStr = Field(alias="NodeName")
    resource: Optional[DataPipelineNodesInnerResource] = Field(default=None, alias="Resource")
    concurrency_factor: Optional[Union[StrictFloat, StrictInt]] = Field(default=None, alias="ConcurrencyFactor")
    dataset_domain: Optional[StrictStr] = Field(default=None, alias="DatasetDomain")
    sync_all_datasets: Optional[StrictBool] = Field(default=None, alias="SyncAllDatasets")
    list_of_input_datasets: Optional[List[StrictStr]] = Field(default=None, alias="ListOfInputDatasets")
    arguments: Optional[Dict[str, Any]] = Field(default=None, alias="Arguments")
    features: Optional[List[StrictStr]] = Field(default=None, alias="Features")
    email_body_execution_property_key: Optional[StrictStr] = Field(default=None, alias="EmailBodyExecutionPropertyKey")
    email_subject_execution_property_key: Optional[StrictStr] = Field(default=None, alias="EmailSubjectExecutionPropertyKey")
    email_to_execution_property_key: Optional[StrictStr] = Field(default=None, alias="EmailToExecutionPropertyKey")
    timeout: Optional[StrictInt] = Field(default=None, alias="Timeout")
    agent_type: Optional[StrictStr] = Field(default=None, alias="AgentType")
    model_id: Optional[StrictStr] = Field(default=None, alias="ModelId")
    dataset_processing_mode: Optional[StrictStr] = Field(default=None, alias="DatasetProcessingMode")
    file_names_list: Optional[List[StrictStr]] = Field(default=None, alias="FileNamesList")
    columns_to_visualise: Optional[List[StrictStr]] = Field(default=None, alias="ColumnsToVisualise")
    prompt: Optional[StrictStr] = Field(default=None, alias="Prompt")
    ingestion_type: Optional[StrictStr] = Field(default=None, alias="IngestionType")
    node_instance: Optional[StrictStr] = Field(default=None, alias="NodeInstance")
    from_time: Optional[StrictStr] = Field(default=None, alias="FromTime")
    to_time: Optional[StrictStr] = Field(default=None, alias="ToTime")
    configuration: Optional[Dict[str, Any]] = Field(default=None, alias="Configuration")
    resource_identifier: Optional[StrictStr] = Field(default=None, description="Resource identifier for nodes that require external resources (e.g., KnowledgeBase, LambdaFunction)", alias="ResourceIdentifier")
    inputs: Optional[List[DataPipelineNodesInnerInputsInner]] = Field(default=None, description="Input configurations for AI Flow nodes. Each input has Name and Type.", alias="Inputs")
    outputs: Optional[List[DataPipelineNodesInnerOutputsInner]] = Field(default=None, description="Output configurations for AI Flow nodes. Each output has Name and Type.", alias="Outputs")
    conditions: Optional[List[DataPipelineNodesInnerConditionsInner]] = Field(default=None, description="Condition configurations for Condition nodes. Each condition has Name and optional Expression (default condition doesn't require Expression).", alias="Conditions")
    __properties: ClassVar[List[str]] = ["ModuleType", "SourceDatasetId", "SourceLanguageId", "TargetLanguageId", "TargetDatasetId", "FileProcessingMode", "NodeName", "Resource", "ConcurrencyFactor", "DatasetDomain", "SyncAllDatasets", "ListOfInputDatasets", "Arguments", "Features", "EmailBodyExecutionPropertyKey", "EmailSubjectExecutionPropertyKey", "EmailToExecutionPropertyKey", "Timeout", "AgentType", "ModelId", "DatasetProcessingMode", "FileNamesList", "ColumnsToVisualise", "Prompt", "IngestionType", "NodeInstance", "FromTime", "ToTime", "Configuration", "ResourceIdentifier", "Inputs", "Outputs", "Conditions"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )
    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))
    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())
    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of DataPipelinesPostNodesInner from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This has the following differences from calling pydantic's
        `self.model_dump(by_alias=True)`:

        * `None` is only added to the output dict for nullable fields that
          were set at model initialization. Other fields with value `None`
          are ignored.
        """
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # override the default output from pydantic by calling `to_dict()` of resource
        if self.resource:
            _dict['Resource'] = self.resource.to_dict()
        # override the default output from pydantic by calling `to_dict()` of each item in inputs (list)
        _items = []
        if self.inputs:
            for _item_inputs in self.inputs:
                if _item_inputs:
                    _items.append(_item_inputs.to_dict())
            _dict['Inputs'] = _items
        # override the default output from pydantic by calling `to_dict()` of each item in outputs (list)
        _items = []
        if self.outputs:
            for _item_outputs in self.outputs:
                if _item_outputs:
                    _items.append(_item_outputs.to_dict())
            _dict['Outputs'] = _items
        # override the default output from pydantic by calling `to_dict()` of each item in conditions (list)
        _items = []
        if self.conditions:
            for _item_conditions in self.conditions:
                if _item_conditions:
                    _items.append(_item_conditions.to_dict())
            _dict['Conditions'] = _items
        return _dict
    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of DataPipelinesPostNodesInner from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "ModuleType": obj.get("ModuleType"),
            "SourceDatasetId": obj.get("SourceDatasetId"),
            "SourceLanguageId": obj.get("SourceLanguageId"),
            "TargetLanguageId": obj.get("TargetLanguageId"),
            "TargetDatasetId": obj.get("TargetDatasetId"),
            "FileProcessingMode": obj.get("FileProcessingMode"),
            "NodeName": obj.get("NodeName"),
            "Resource": DataPipelineNodesInnerResource.from_dict(obj["Resource"]) if obj.get("Resource") is not None else None,
            "ConcurrencyFactor": obj.get("ConcurrencyFactor"),
            "DatasetDomain": obj.get("DatasetDomain"),
            "SyncAllDatasets": obj.get("SyncAllDatasets"),
            "ListOfInputDatasets": obj.get("ListOfInputDatasets"),
            "Arguments": obj.get("Arguments"),
            "Features": obj.get("Features"),
            "EmailBodyExecutionPropertyKey": obj.get("EmailBodyExecutionPropertyKey"),
            "EmailSubjectExecutionPropertyKey": obj.get("EmailSubjectExecutionPropertyKey"),
            "EmailToExecutionPropertyKey": obj.get("EmailToExecutionPropertyKey"),
            "Timeout": obj.get("Timeout"),
            "AgentType": obj.get("AgentType"),
            "ModelId": obj.get("ModelId"),
            "DatasetProcessingMode": obj.get("DatasetProcessingMode"),
            "FileNamesList": obj.get("FileNamesList"),
            "ColumnsToVisualise": obj.get("ColumnsToVisualise"),
            "Prompt": obj.get("Prompt"),
            "IngestionType": obj.get("IngestionType"),
            "NodeInstance": obj.get("NodeInstance"),
            "FromTime": obj.get("FromTime"),
            "ToTime": obj.get("ToTime"),
            "Configuration": obj.get("Configuration"),
            "ResourceIdentifier": obj.get("ResourceIdentifier"),
            "Inputs": [DataPipelineNodesInnerInputsInner.from_dict(_item) for _item in obj["Inputs"]] if obj.get("Inputs") is not None else None,
            "Outputs": [DataPipelineNodesInnerOutputsInner.from_dict(_item) for _item in obj["Outputs"]] if obj.get("Outputs") is not None else None,
            "Conditions": [DataPipelineNodesInnerConditionsInner.from_dict(_item) for _item in obj["Conditions"]] if obj.get("Conditions") is not None else None
        })
        return _obj
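
# Usage sketch (illustrative addition, not part of the generated module): builds a
# node from a plain dict keyed by the JSON aliases and round-trips it through the
# to_dict()/to_json() helpers. The field values below are placeholders, not values
# prescribed by the Amorphic API.
if __name__ == "__main__":
    node = DataPipelinesPostNodesInner.from_dict({
        "ModuleType": "etl",         # placeholder module type
        "NodeName": "clean-orders",  # placeholder node name
        "Timeout": 300,
    })
    # to_dict() serializes by alias and omits optional fields left as None,
    # so only the three keys supplied above appear in the output.
    print(node.to_dict())
    # Round-trip through the JSON helpers yields an equivalent model.
    assert DataPipelinesPostNodesInner.from_json(node.to_json()).to_dict() == node.to_dict()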