# coding: utf-8
"""
Amorphic Data Platform
Amorphic Data Platform - API Definition documentation
The version of the OpenAPI document: 1.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictInt, StrictStr
from typing import Any, ClassVar, Dict, List, Optional, Set
from typing_extensions import Self
from openapi_client.models.datasource_details_datasource_config_cluster_config_auto_scaling_config import DatasourceDetailsDatasourceConfigClusterConfigAutoScalingConfig
from openapi_client.models.datasource_entity_details_entity_config_schedule_config import DatasourceEntityDetailsEntityConfigScheduleConfig
class DatasourceEntityDetailsEntityConfig(BaseModel):
"""
DatasourceEntityDetailsEntityConfig
""" # noqa: E501
instance_multi_az: Optional[StrictBool] = Field(default=None, alias="InstanceMultiAZ")
auto_minor_version_upgrade: Optional[StrictBool] = Field(default=None, alias="AutoMinorVersionUpgrade")
preferred_maintenance_window: Optional[StrictStr] = Field(default=None, alias="PreferredMaintenanceWindow")
shared_instance: Optional[StrictStr] = Field(default=None, alias="SharedInstance")
instance_class: Optional[StrictStr] = Field(default=None, alias="InstanceClass")
allocated_storage: Optional[StrictInt] = Field(default=None, alias="AllocatedStorage")
instance_az: Optional[StrictStr] = Field(default=None, alias="InstanceAZ")
dms_version: Optional[StrictStr] = Field(default=None, alias="DmsVersion")
publicly_accessible_instance: Optional[StrictBool] = Field(default=None, alias="PubliclyAccessibleInstance")
lambda_handler: Optional[StrictStr] = Field(default=None, alias="LambdaHandler")
memory_size: Optional[StrictInt] = Field(default=None, alias="MemorySize")
cluster_size: Optional[StrictStr] = Field(default=None, alias="ClusterSize")
cluster_storage: Optional[StrictStr] = Field(default=None, alias="ClusterStorage")
number_of_brokers: Optional[StrictStr] = Field(default=None, alias="NumberOfBrokers")
list_of_consumers: Optional[List[Any]] = Field(default=None, alias="ListOfConsumers")
kafka_version: Optional[StrictStr] = Field(default=None, alias="KafkaVersion")
shared_cluster: Optional[StrictStr] = Field(default=None, alias="SharedCluster")
is_auto_scaling_enabled: Optional[StrictBool] = Field(default=None, alias="IsAutoScalingEnabled")
is_auto_terminate_enabled: Optional[StrictBool] = Field(default=None, alias="IsAutoTerminateEnabled")
data_retention_in_hours: Optional[StrictStr] = Field(default=None, alias="DataRetentionInHours")
auto_scaling_config: Optional[DatasourceDetailsDatasourceConfigClusterConfigAutoScalingConfig] = Field(default=None, alias="AutoScalingConfig")
schedule_config: Optional[DatasourceEntityDetailsEntityConfigScheduleConfig] = Field(default=None, alias="ScheduleConfig")
__properties: ClassVar[List[str]] = ["InstanceMultiAZ", "AutoMinorVersionUpgrade", "PreferredMaintenanceWindow", "SharedInstance", "InstanceClass", "AllocatedStorage", "InstanceAZ", "DmsVersion", "PubliclyAccessibleInstance", "LambdaHandler", "MemorySize", "ClusterSize", "ClusterStorage", "NumberOfBrokers", "ListOfConsumers", "KafkaVersion", "SharedCluster", "IsAutoScalingEnabled", "IsAutoTerminateEnabled", "DataRetentionInHours", "AutoScalingConfig", "ScheduleConfig"]
model_config = ConfigDict(
populate_by_name=True,
validate_assignment=True,
protected_namespaces=(),
)
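
    # With populate_by_name=True, instances accept either the Python field
    # names or the OpenAPI aliases, e.g. (illustrative value, not taken from
    # the Amorphic API):
    #   DatasourceEntityDetailsEntityConfig(instance_class="dms.t3.micro")
    #   DatasourceEntityDetailsEntityConfig(InstanceClass="dms.t3.micro")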
def to_str(self) -> str:
"""Returns the string representation of the model using alias"""
return pprint.pformat(self.model_dump(by_alias=True))
def to_json(self) -> str:
"""Returns the JSON representation of the model using alias"""
# TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
return json.dumps(self.to_dict())
@classmethod
def from_json(cls, json_str: str) -> Optional[Self]:
"""Create an instance of DatasourceEntityDetailsEntityConfig from a JSON string"""
return cls.from_dict(json.loads(json_str))
def to_dict(self) -> Dict[str, Any]:
"""Return the dictionary representation of the model using alias.
This has the following differences from calling pydantic's
`self.model_dump(by_alias=True)`:
        * fields whose value is `None` are omitted from the output dict
          (the dump is performed with `exclude_none=True`).
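
        Example (illustrative; the field value is an assumption):
            >>> cfg = DatasourceEntityDetailsEntityConfig(InstanceClass="dms.t3.micro")
            >>> cfg.to_dict()
            {'InstanceClass': 'dms.t3.micro'}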
"""
        excluded_fields: Set[str] = set()
_dict = self.model_dump(
by_alias=True,
exclude=excluded_fields,
exclude_none=True,
)
# override the default output from pydantic by calling `to_dict()` of auto_scaling_config
if self.auto_scaling_config:
_dict['AutoScalingConfig'] = self.auto_scaling_config.to_dict()
# override the default output from pydantic by calling `to_dict()` of schedule_config
if self.schedule_config:
_dict['ScheduleConfig'] = self.schedule_config.to_dict()
return _dict
@classmethod
def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
"""Create an instance of DatasourceEntityDetailsEntityConfig from a dict"""
if obj is None:
return None
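        # If obj is already a model instance (or otherwise not a dict),
        # delegate straight to pydantic's validation.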
if not isinstance(obj, dict):
return cls.model_validate(obj)
_obj = cls.model_validate({
"InstanceMultiAZ": obj.get("InstanceMultiAZ"),
"AutoMinorVersionUpgrade": obj.get("AutoMinorVersionUpgrade"),
"PreferredMaintenanceWindow": obj.get("PreferredMaintenanceWindow"),
"SharedInstance": obj.get("SharedInstance"),
"InstanceClass": obj.get("InstanceClass"),
"AllocatedStorage": obj.get("AllocatedStorage"),
"InstanceAZ": obj.get("InstanceAZ"),
"DmsVersion": obj.get("DmsVersion"),
"PubliclyAccessibleInstance": obj.get("PubliclyAccessibleInstance"),
"LambdaHandler": obj.get("LambdaHandler"),
"MemorySize": obj.get("MemorySize"),
"ClusterSize": obj.get("ClusterSize"),
"ClusterStorage": obj.get("ClusterStorage"),
"NumberOfBrokers": obj.get("NumberOfBrokers"),
"ListOfConsumers": obj.get("ListOfConsumers"),
"KafkaVersion": obj.get("KafkaVersion"),
"SharedCluster": obj.get("SharedCluster"),
"IsAutoScalingEnabled": obj.get("IsAutoScalingEnabled"),
"IsAutoTerminateEnabled": obj.get("IsAutoTerminateEnabled"),
"DataRetentionInHours": obj.get("DataRetentionInHours"),
"AutoScalingConfig": DatasourceDetailsDatasourceConfigClusterConfigAutoScalingConfig.from_dict(obj["AutoScalingConfig"]) if obj.get("AutoScalingConfig") is not None else None,
"ScheduleConfig": DatasourceEntityDetailsEntityConfigScheduleConfig.from_dict(obj["ScheduleConfig"]) if obj.get("ScheduleConfig") is not None else None
})
return _obj
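

if __name__ == "__main__":
    # Minimal round-trip sketch (illustrative only; the field values below are
    # assumptions, not values taken from a real Amorphic datasource).
    _payload = '{"InstanceMultiAZ": false, "InstanceClass": "dms.t3.micro"}'
    _config = DatasourceEntityDetailsEntityConfig.from_json(_payload)
    assert _config is not None
    # Aliased JSON keys populate the snake_case model fields.
    print(_config.instance_multi_az)  # False
    print(_config.instance_class)     # dms.t3.micro
    # to_dict() serializes back with aliases and omits fields left as None.
    print(_config.to_dict())  # {'InstanceMultiAZ': False, 'InstanceClass': 'dms.t3.micro'}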