# coding: utf-8
"""
Amorphic Data Platform
Amorphic Data Platform - API Definition documentation
The version of the OpenAPI document: 0.3.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import BaseModel, ConfigDict, Field, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from openapi_client.models.dataset_access import DatasetAccess
from openapi_client.models.user_resources_code_repositories_inner import UserResourcesCodeRepositoriesInner
from openapi_client.models.user_resources_dashboards_inner import UserResourcesDashboardsInner
from openapi_client.models.user_resources_data_pipelines_inner import UserResourcesDataPipelinesInner
from openapi_client.models.user_resources_data_quality_checks_inner import UserResourcesDataQualityChecksInner
from openapi_client.models.user_resources_datalab_lifecycle_configurations_inner import UserResourcesDatalabLifecycleConfigurationsInner
from openapi_client.models.user_resources_datalabs_inner import UserResourcesDatalabsInner
from openapi_client.models.user_resources_datasources_inner import UserResourcesDatasourcesInner
from openapi_client.models.user_resources_domain_names_inner import UserResourcesDomainNamesInner
from openapi_client.models.user_resources_etl_jobs_inner import UserResourcesEtlJobsInner
from openapi_client.models.user_resources_groups import UserResourcesGroups
from openapi_client.models.user_resources_models_inner import UserResourcesModelsInner
from openapi_client.models.user_resources_parameters import UserResourcesParameters
from openapi_client.models.user_resources_roles_inner import UserResourcesRolesInner
from openapi_client.models.user_resources_schedules_inner import UserResourcesSchedulesInner
from typing import Optional, Set
from typing_extensions import Self
class UserResources(BaseModel):
    """
    UserResources

    Generated API model grouping the resources associated with a user
    (dataset access, schedules, ETL jobs, datalabs, ML models, dashboards,
    roles, groups, pipelines, etc.).  Python attribute names are
    snake_case; the wire format uses the PascalCase aliases listed in
    ``__properties``.
    """ # noqa: E501
    dataset_access: Optional[DatasetAccess] = Field(default=None, alias="DatasetAccess")
    schedules: Optional[List[UserResourcesSchedulesInner]] = Field(default=None, alias="Schedules")
    etl_jobs: Optional[List[UserResourcesEtlJobsInner]] = Field(default=None, alias="EtlJobs")
    datalabs: Optional[List[UserResourcesDatalabsInner]] = Field(default=None, alias="Datalabs")
    models: Optional[List[UserResourcesModelsInner]] = Field(default=None, alias="Models")
    dashboards: Optional[List[UserResourcesDashboardsInner]] = Field(default=None, alias="Dashboards")
    roles: Optional[List[UserResourcesRolesInner]] = Field(default=None, alias="Roles")
    groups: Optional[UserResourcesGroups] = Field(default=None, alias="Groups")
    parameters: Optional[UserResourcesParameters] = Field(default=None, alias="Parameters")
    shared_libraries: Optional[List[StrictStr]] = Field(default=None, alias="SharedLibraries")
    data_pipelines: Optional[List[UserResourcesDataPipelinesInner]] = Field(default=None, alias="DataPipelines")
    data_quality_checks: Optional[List[UserResourcesDataQualityChecksInner]] = Field(default=None, alias="DataQualityChecks")
    domain_names: Optional[List[UserResourcesDomainNamesInner]] = Field(default=None, alias="DomainNames")
    tenant_names: Optional[List[StrictStr]] = Field(default=None, alias="TenantNames")
    datalab_lifecycle_configurations: Optional[List[UserResourcesDatalabLifecycleConfigurationsInner]] = Field(default=None, alias="DatalabLifecycleConfigurations")
    code_repositories: Optional[List[UserResourcesCodeRepositoriesInner]] = Field(default=None, alias="CodeRepositories")
    datasources: Optional[List[UserResourcesDatasourcesInner]] = Field(default=None, alias="Datasources")
    # Wire-format (alias) names of every declared field, in declaration order.
    __properties: ClassVar[List[str]] = ["DatasetAccess", "Schedules", "EtlJobs", "Datalabs", "Models", "Dashboards", "Roles", "Groups", "Parameters", "SharedLibraries", "DataPipelines", "DataQualityChecks", "DomainNames", "TenantNames", "DatalabLifecycleConfigurations", "CodeRepositories", "Datasources"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    @staticmethod
    def _nested_list_to_dict(items: List[Any]) -> List[Dict[str, Any]]:
        """Serialize a list of nested models via each item's ``to_dict()``.

        Falsy entries (e.g. ``None``) are skipped, matching the behavior of
        the per-field serialization loops emitted by openapi-generator.
        """
        return [item.to_dict() for item in items if item]

    def to_str(self) -> str:
        """Return the string representation of the model using alias names."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Return the JSON representation of the model using alias names."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of UserResources from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias names.

        This differs from calling pydantic's ``self.model_dump(by_alias=True)``:

        * ``None`` is only added to the output dict for nullable fields that
          were set at model initialization.  Other fields with value ``None``
          are ignored.
        * Nested models are serialized through their own ``to_dict()`` so
          their custom serialization rules apply recursively.
        """
        excluded_fields: Set[str] = set()
        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        # Override pydantic's default output for single nested models by
        # delegating to each model's to_dict().
        if self.dataset_access:
            _dict['DatasetAccess'] = self.dataset_access.to_dict()
        if self.groups:
            _dict['Groups'] = self.groups.to_dict()
        if self.parameters:
            _dict['Parameters'] = self.parameters.to_dict()
        # Override pydantic's default output for lists of nested models by
        # serializing each item through its to_dict().
        if self.schedules:
            _dict['Schedules'] = self._nested_list_to_dict(self.schedules)
        if self.etl_jobs:
            _dict['EtlJobs'] = self._nested_list_to_dict(self.etl_jobs)
        if self.datalabs:
            _dict['Datalabs'] = self._nested_list_to_dict(self.datalabs)
        if self.models:
            _dict['Models'] = self._nested_list_to_dict(self.models)
        if self.dashboards:
            _dict['Dashboards'] = self._nested_list_to_dict(self.dashboards)
        if self.roles:
            _dict['Roles'] = self._nested_list_to_dict(self.roles)
        if self.data_pipelines:
            _dict['DataPipelines'] = self._nested_list_to_dict(self.data_pipelines)
        if self.data_quality_checks:
            _dict['DataQualityChecks'] = self._nested_list_to_dict(self.data_quality_checks)
        if self.domain_names:
            _dict['DomainNames'] = self._nested_list_to_dict(self.domain_names)
        if self.datalab_lifecycle_configurations:
            _dict['DatalabLifecycleConfigurations'] = self._nested_list_to_dict(self.datalab_lifecycle_configurations)
        if self.code_repositories:
            _dict['CodeRepositories'] = self._nested_list_to_dict(self.code_repositories)
        if self.datasources:
            _dict['Datasources'] = self._nested_list_to_dict(self.datasources)
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of UserResources from a dict.

        ``None`` input yields ``None``; non-dict input is handed straight to
        ``model_validate``.  Nested model payloads are rebuilt through the
        corresponding model's ``from_dict``.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        _obj = cls.model_validate({
            "DatasetAccess": DatasetAccess.from_dict(obj["DatasetAccess"]) if obj.get("DatasetAccess") is not None else None,
            "Schedules": [UserResourcesSchedulesInner.from_dict(_item) for _item in obj["Schedules"]] if obj.get("Schedules") is not None else None,
            "EtlJobs": [UserResourcesEtlJobsInner.from_dict(_item) for _item in obj["EtlJobs"]] if obj.get("EtlJobs") is not None else None,
            "Datalabs": [UserResourcesDatalabsInner.from_dict(_item) for _item in obj["Datalabs"]] if obj.get("Datalabs") is not None else None,
            "Models": [UserResourcesModelsInner.from_dict(_item) for _item in obj["Models"]] if obj.get("Models") is not None else None,
            "Dashboards": [UserResourcesDashboardsInner.from_dict(_item) for _item in obj["Dashboards"]] if obj.get("Dashboards") is not None else None,
            "Roles": [UserResourcesRolesInner.from_dict(_item) for _item in obj["Roles"]] if obj.get("Roles") is not None else None,
            "Groups": UserResourcesGroups.from_dict(obj["Groups"]) if obj.get("Groups") is not None else None,
            "Parameters": UserResourcesParameters.from_dict(obj["Parameters"]) if obj.get("Parameters") is not None else None,
            "SharedLibraries": obj.get("SharedLibraries"),
            "DataPipelines": [UserResourcesDataPipelinesInner.from_dict(_item) for _item in obj["DataPipelines"]] if obj.get("DataPipelines") is not None else None,
            "DataQualityChecks": [UserResourcesDataQualityChecksInner.from_dict(_item) for _item in obj["DataQualityChecks"]] if obj.get("DataQualityChecks") is not None else None,
            "DomainNames": [UserResourcesDomainNamesInner.from_dict(_item) for _item in obj["DomainNames"]] if obj.get("DomainNames") is not None else None,
            "TenantNames": obj.get("TenantNames"),
            "DatalabLifecycleConfigurations": [UserResourcesDatalabLifecycleConfigurationsInner.from_dict(_item) for _item in obj["DatalabLifecycleConfigurations"]] if obj.get("DatalabLifecycleConfigurations") is not None else None,
            "CodeRepositories": [UserResourcesCodeRepositoriesInner.from_dict(_item) for _item in obj["CodeRepositories"]] if obj.get("CodeRepositories") is not None else None,
            "Datasources": [UserResourcesDatasourcesInner.from_dict(_item) for _item in obj["Datasources"]] if obj.get("Datasources") is not None else None
        })
        return _obj